repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/fineract | 35,696 | fineract-provider/src/main/java/org/apache/fineract/portfolio/loanaccount/guarantor/service/GuarantorDomainServiceImpl.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.loanaccount.guarantor.service;
import jakarta.annotation.PostConstruct;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import lombok.RequiredArgsConstructor;
import org.apache.fineract.infrastructure.configuration.domain.ConfigurationDomainService;
import org.apache.fineract.infrastructure.core.data.ApiParameterError;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.infrastructure.core.domain.ExternalId;
import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import org.apache.fineract.infrastructure.core.service.DateUtils;
import org.apache.fineract.infrastructure.core.service.ExternalIdFactory;
import org.apache.fineract.infrastructure.event.business.BusinessEventListener;
import org.apache.fineract.infrastructure.event.business.domain.loan.LoanAdjustTransactionBusinessEvent;
import org.apache.fineract.infrastructure.event.business.domain.loan.LoanApprovedBusinessEvent;
import org.apache.fineract.infrastructure.event.business.domain.loan.LoanUndoApprovalBusinessEvent;
import org.apache.fineract.infrastructure.event.business.domain.loan.LoanUndoDisbursalBusinessEvent;
import org.apache.fineract.infrastructure.event.business.domain.loan.transaction.LoanTransactionMakeRepaymentPostBusinessEvent;
import org.apache.fineract.infrastructure.event.business.domain.loan.transaction.LoanUndoWrittenOffBusinessEvent;
import org.apache.fineract.infrastructure.event.business.domain.loan.transaction.LoanWrittenOffPostBusinessEvent;
import org.apache.fineract.infrastructure.event.business.service.BusinessEventNotifierService;
import org.apache.fineract.organisation.monetary.domain.MoneyHelper;
import org.apache.fineract.portfolio.account.PortfolioAccountType;
import org.apache.fineract.portfolio.account.data.AccountTransferDTO;
import org.apache.fineract.portfolio.account.domain.AccountTransferDetails;
import org.apache.fineract.portfolio.account.domain.AccountTransferType;
import org.apache.fineract.portfolio.account.service.AccountTransfersWritePlatformService;
import org.apache.fineract.portfolio.loanaccount.domain.Loan;
import org.apache.fineract.portfolio.loanaccount.domain.LoanRepository;
import org.apache.fineract.portfolio.loanaccount.domain.LoanTransaction;
import org.apache.fineract.portfolio.loanaccount.domain.LoanTransactionRepository;
import org.apache.fineract.portfolio.loanaccount.guarantor.GuarantorConstants;
import org.apache.fineract.portfolio.loanaccount.guarantor.domain.Guarantor;
import org.apache.fineract.portfolio.loanaccount.guarantor.domain.GuarantorFundingDetails;
import org.apache.fineract.portfolio.loanaccount.guarantor.domain.GuarantorFundingRepository;
import org.apache.fineract.portfolio.loanaccount.guarantor.domain.GuarantorFundingTransaction;
import org.apache.fineract.portfolio.loanaccount.guarantor.domain.GuarantorFundingTransactionRepository;
import org.apache.fineract.portfolio.loanaccount.guarantor.domain.GuarantorRepository;
import org.apache.fineract.portfolio.loanproduct.domain.LoanProduct;
import org.apache.fineract.portfolio.loanproduct.domain.LoanProductGuaranteeDetails;
import org.apache.fineract.portfolio.paymentdetail.domain.PaymentDetail;
import org.apache.fineract.portfolio.savings.domain.DepositAccountOnHoldTransaction;
import org.apache.fineract.portfolio.savings.domain.DepositAccountOnHoldTransactionRepository;
import org.apache.fineract.portfolio.savings.domain.SavingsAccount;
import org.apache.fineract.portfolio.savings.domain.SavingsAccountAssembler;
import org.apache.fineract.portfolio.savings.domain.SavingsAccountTransaction;
import org.apache.fineract.portfolio.savings.exception.InsufficientAccountBalanceException;
import org.springframework.stereotype.Service;
@Service
@RequiredArgsConstructor
/**
 * Service that enforces guarantor business rules and keeps guarantor savings-account
 * holds in sync with the loan lifecycle. It registers itself (via {@link #addListeners()})
 * as a listener for loan business events and, on each event, holds, releases, transfers
 * or reverses the guarantee funds blocked on the guarantors' linked savings accounts.
 */
public class GuarantorDomainServiceImpl implements GuarantorDomainService {

    private final GuarantorRepository guarantorRepository;
    private final GuarantorFundingRepository guarantorFundingRepository;
    private final GuarantorFundingTransactionRepository guarantorFundingTransactionRepository;
    private final AccountTransfersWritePlatformService accountTransfersWritePlatformService;
    private final BusinessEventNotifierService businessEventNotifierService;
    private final DepositAccountOnHoldTransactionRepository depositAccountOnHoldTransactionRepository;
    // Maps loanId -> GuarantorFundingDetails id while transferFundsFromGuarantor() is in flight.
    // The repayment event fired by the transfer is routed to completeGuarantorFund() (instead of
    // the normal releaseGuarantorFunds()) when the loan id is present in this map.
    // NOTE(review): plain HashMap on a singleton @Service — not thread-safe; presumably safe only
    // if recovery transfers never run concurrently for the same service instance. TODO confirm.
    private final Map<Long, Long> releaseLoanIds = new HashMap<>(2);
    private final SavingsAccountAssembler savingsAccountAssembler;
    private final ConfigurationDomainService configurationDomainService;
    private final ExternalIdFactory externalIdFactory;
    private final LoanRepository loanRepository;
    private final LoanTransactionRepository loanTransactionRepository;

    /**
     * Registers the inner listener classes for the loan lifecycle events this service reacts to.
     * Ordering matters for LoanApprovedBusinessEvent: validation runs before funds are held.
     */
    @PostConstruct
    public void addListeners() {
        businessEventNotifierService.addPostBusinessEventListener(LoanApprovedBusinessEvent.class, new ValidateOnBusinessEvent());
        businessEventNotifierService.addPostBusinessEventListener(LoanApprovedBusinessEvent.class, new HoldFundsOnBusinessEvent());
        businessEventNotifierService.addPostBusinessEventListener(LoanUndoApprovalBusinessEvent.class, new UndoAllFundTransactions());
        businessEventNotifierService.addPostBusinessEventListener(LoanUndoDisbursalBusinessEvent.class,
                new ReverseAllFundsOnBusinessEvent());
        businessEventNotifierService.addPostBusinessEventListener(LoanAdjustTransactionBusinessEvent.class,
                new AdjustFundsOnBusinessEvent());
        businessEventNotifierService.addPostBusinessEventListener(LoanTransactionMakeRepaymentPostBusinessEvent.class,
                new ReleaseFundsOnBusinessEvent());
        businessEventNotifierService.addPostBusinessEventListener(LoanWrittenOffPostBusinessEvent.class, new ReleaseAllFunds());
        businessEventNotifierService.addPostBusinessEventListener(LoanUndoWrittenOffBusinessEvent.class, new ReverseFundsOnBusinessEvent());
    }

    /**
     * Validates that the guarantee attached to the loan meets the product's minimum percentages:
     * minimum self guarantee, minimum external guarantee, and the overall mandatory guarantee,
     * each computed as a percentage of the loan principal. Only funding details in status
     * active/withdrawn/completed count, net of amounts already transferred.
     *
     * @throws PlatformApiDataValidationException when any of the three minimums is not met
     */
    @Override
    public void validateGuarantorBusinessRules(Loan loan) {
        LoanProduct loanProduct = loan.loanProduct();
        BigDecimal principal = loan.getPrincipal().getAmount();
        if (loanProduct.isHoldGuaranteeFunds()) {
            LoanProductGuaranteeDetails guaranteeData = loanProduct.getLoanProductGuaranteeDetails();
            final List<Guarantor> existGuarantorList = this.guarantorRepository.findByLoan(loan);
            // Required amounts: product percentages applied to the principal.
            BigDecimal mandatoryAmount = principal.multiply(guaranteeData.getMandatoryGuarantee()).divide(BigDecimal.valueOf(100));
            BigDecimal minSelfAmount = principal.multiply(guaranteeData.getMinimumGuaranteeFromOwnFunds()).divide(BigDecimal.valueOf(100));
            BigDecimal minExtGuarantee = principal.multiply(guaranteeData.getMinimumGuaranteeFromGuarantor())
                    .divide(BigDecimal.valueOf(100));
            BigDecimal actualAmount = BigDecimal.ZERO;
            BigDecimal actualSelfAmount = BigDecimal.ZERO;
            BigDecimal actualExtGuarantee = BigDecimal.ZERO;
            for (Guarantor guarantor : existGuarantorList) {
                List<GuarantorFundingDetails> fundingDetails = guarantor.getGuarantorFundDetails();
                for (GuarantorFundingDetails guarantorFundingDetails : fundingDetails) {
                    if (guarantorFundingDetails.getStatus().isActive() || guarantorFundingDetails.getStatus().isWithdrawn()
                            || guarantorFundingDetails.getStatus().isCompleted()) {
                        if (guarantor.isSelfGuarantee()) {
                            // Count only the portion not yet transferred to the loan.
                            actualSelfAmount = actualSelfAmount.add(guarantorFundingDetails.getAmount())
                                    .subtract(guarantorFundingDetails.getAmountTransfered());
                        } else {
                            actualExtGuarantee = actualExtGuarantee.add(guarantorFundingDetails.getAmount())
                                    .subtract(guarantorFundingDetails.getAmountTransfered());
                        }
                    }
                }
            }
            final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
            final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loan.guarantor");
            if (actualSelfAmount.compareTo(minSelfAmount) < 0) {
                baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode(GuarantorConstants.GUARANTOR_SELF_GUARANTEE_ERROR,
                        minSelfAmount);
            }
            if (actualExtGuarantee.compareTo(minExtGuarantee) < 0) {
                baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode(GuarantorConstants.GUARANTOR_EXTERNAL_GUARANTEE_ERROR,
                        minExtGuarantee);
            }
            actualAmount = actualAmount.add(actualExtGuarantee).add(actualSelfAmount);
            if (actualAmount.compareTo(mandatoryAmount) < 0) {
                baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode(GuarantorConstants.GUARANTOR_MANDATORY_GUARANTEE_ERROR,
                        mandatoryAmount);
            }
            if (!dataValidationErrors.isEmpty()) {
                throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                        dataValidationErrors);
            }
        }
    }

    /**
     * Method assigns a guarantor to loan and blocks the funds on guarantor's account.
     *
     * @param guarantorFundingDetails the funding detail to activate; only processed when its status is active
     * @param transactionDate         value date of the on-hold transaction
     * @throws PlatformApiDataValidationException when the savings account lacks sufficient withdrawable balance
     */
    @Override
    public void assignGuarantor(final GuarantorFundingDetails guarantorFundingDetails, final LocalDate transactionDate) {
        if (guarantorFundingDetails.getStatus().isActive()) {
            SavingsAccount savingsAccount = guarantorFundingDetails.getLinkedSavingsAccount();
            // Hold first, then verify: a negative withdrawable balance after the hold means
            // the account could not cover the guarantee amount.
            savingsAccount.holdFunds(guarantorFundingDetails.getAmount());
            if (savingsAccount.getWithdrawableBalance().compareTo(BigDecimal.ZERO) < 0) {
                final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
                final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loan.guarantor");
                baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode(GuarantorConstants.GUARANTOR_INSUFFICIENT_BALANCE_ERROR,
                        savingsAccount.getId());
                throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                        dataValidationErrors);
            }
            DepositAccountOnHoldTransaction onHoldTransaction = DepositAccountOnHoldTransaction.hold(savingsAccount,
                    guarantorFundingDetails.getAmount(), transactionDate);
            GuarantorFundingTransaction guarantorFundingTransaction = new GuarantorFundingTransaction(guarantorFundingDetails, null,
                    onHoldTransaction);
            guarantorFundingDetails.addGuarantorFundingTransactions(guarantorFundingTransaction);
            this.depositAccountOnHoldTransactionRepository.saveAndFlush(onHoldTransaction);
        }
    }

    /**
     * Method releases(withdraw) a guarantor from loan and unblocks the funds on guarantor's account.
     * No-op when the funding detail is inactive or has no remaining amount.
     */
    @Override
    public void releaseGuarantor(final GuarantorFundingDetails guarantorFundingDetails, final LocalDate transactionDate) {
        BigDecimal amoutForWithdraw = guarantorFundingDetails.getAmountRemaining();
        if (amoutForWithdraw.compareTo(BigDecimal.ZERO) > 0 && guarantorFundingDetails.getStatus().isActive()) {
            SavingsAccount savingsAccount = guarantorFundingDetails.getLinkedSavingsAccount();
            savingsAccount.releaseFunds(amoutForWithdraw);
            DepositAccountOnHoldTransaction onHoldTransaction = DepositAccountOnHoldTransaction.release(savingsAccount, amoutForWithdraw,
                    transactionDate);
            GuarantorFundingTransaction guarantorFundingTransaction = new GuarantorFundingTransaction(guarantorFundingDetails, null,
                    onHoldTransaction);
            guarantorFundingDetails.addGuarantorFundingTransactions(guarantorFundingTransaction);
            // Both release and withdraw are recorded on the funding detail, and the loan's
            // tracked guarantee amount is reduced by the withdrawn portion.
            guarantorFundingDetails.releaseFunds(amoutForWithdraw);
            guarantorFundingDetails.withdrawFunds(amoutForWithdraw);
            guarantorFundingDetails.getLoanAccount().updateGuaranteeAmount(amoutForWithdraw.negate());
            this.depositAccountOnHoldTransactionRepository.saveAndFlush(onHoldTransaction);
            this.guarantorFundingRepository.saveAndFlush(guarantorFundingDetails);
        }
    }

    /**
     * Method is to recover funds from guarantor's in case loan is unpaid. (Transfers guarantee amount from guarantor's
     * account to loan account and releases guarantor.)
     *
     * While each transfer runs, the loan id is parked in {@link #releaseLoanIds} so that the
     * repayment event raised by the transfer is handled by completeGuarantorFund() rather than
     * the regular repayment release path.
     */
    @Override
    public void transferFundsFromGuarantor(final Loan loan) {
        if (loan.getGuaranteeAmount().compareTo(BigDecimal.ZERO) <= 0) {
            return;
        }
        final List<Guarantor> existGuarantorList = this.guarantorRepository.findByLoan(loan);
        final boolean isRegularTransaction = true;
        final boolean isExceptionForBalanceCheck = true;
        LocalDate transactionDate = DateUtils.getBusinessLocalDate();
        PortfolioAccountType fromAccountType = PortfolioAccountType.SAVINGS;
        PortfolioAccountType toAccountType = PortfolioAccountType.LOAN;
        final Long toAccountId = loan.getId();
        final String description = "Payment from guarantor savings";
        final Locale locale = null;
        final DateTimeFormatter fmt = null;
        final PaymentDetail paymentDetail = null;
        final Integer fromTransferType = null;
        final Integer toTransferType = null;
        final Long chargeId = null;
        final Integer loanInstallmentNumber = null;
        final Integer transferType = AccountTransferType.LOAN_REPAYMENT.getValue();
        final AccountTransferDetails accountTransferDetails = null;
        final String noteText = null;
        Long loanId = loan.getId();
        for (Guarantor guarantor : existGuarantorList) {
            final List<GuarantorFundingDetails> fundingDetails = guarantor.getGuarantorFundDetails();
            for (GuarantorFundingDetails guarantorFundingDetails : fundingDetails) {
                // Re-read the loan per iteration so guarantee/principal figures reflect the
                // transfers already posted in previous iterations.
                Loan freshLoan = loanRepository.findById(loanId).orElseThrow();
                if (guarantorFundingDetails.getStatus().isActive()) {
                    final SavingsAccount fromSavingsAccount = guarantorFundingDetails.getLinkedSavingsAccount();
                    final Long fromAccountId = fromSavingsAccount.getId();
                    releaseLoanIds.put(loanId, guarantorFundingDetails.getId());
                    try {
                        BigDecimal remainingAmount = guarantorFundingDetails.getAmountRemaining();
                        // If guarantees exceed the principal, recover only this guarantor's
                        // proportional share of the principal.
                        if (freshLoan.getGuaranteeAmount().compareTo(freshLoan.getPrincipal().getAmount()) > 0) {
                            remainingAmount = remainingAmount.multiply(freshLoan.getPrincipal().getAmount())
                                    .divide(freshLoan.getGuaranteeAmount(), MoneyHelper.getRoundingMode());
                        }
                        ExternalId externalId = externalIdFactory.create();
                        AccountTransferDTO accountTransferDTO = new AccountTransferDTO(transactionDate, remainingAmount, fromAccountType,
                                toAccountType, fromAccountId, toAccountId, description, locale, fmt, paymentDetail, fromTransferType,
                                toTransferType, chargeId, loanInstallmentNumber, transferType, accountTransferDetails, noteText, externalId,
                                null, null, fromSavingsAccount, isRegularTransaction, isExceptionForBalanceCheck);
                        transferAmount(accountTransferDTO);
                    } finally {
                        releaseLoanIds.remove(loanId);
                    }
                }
            }
        }
    }

    /**
     * Executes a savings-to-loan transfer, translating an insufficient-balance failure into
     * the platform's standard validation exception (with the original as cause).
     *
     * @param accountTransferDTO the fully-populated transfer request
     */
    private void transferAmount(final AccountTransferDTO accountTransferDTO) {
        try {
            this.accountTransfersWritePlatformService.transferFunds(accountTransferDTO);
        } catch (final InsufficientAccountBalanceException e) {
            final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
            final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loan.guarantor");
            baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode(GuarantorConstants.GUARANTOR_INSUFFICIENT_BALANCE_ERROR,
                    accountTransferDTO.getFromAccountId(), accountTransferDTO.getToAccountId());
            throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                    dataValidationErrors, e);
        }
    }

    /**
     * Method reverses all blocked fund(both hold and release) transactions. example: reverses all transactions on undo
     * approval of loan account. Also clears the loan's tracked guarantee amount.
     */
    private void reverseAllFundTransaction(final Loan loan) {
        if (loan.getGuaranteeAmount().compareTo(BigDecimal.ZERO) > 0) {
            final List<Guarantor> existGuarantorList = this.guarantorRepository.findByLoan(loan);
            List<GuarantorFundingDetails> guarantorFundingDetailList = new ArrayList<>();
            for (Guarantor guarantor : existGuarantorList) {
                final List<GuarantorFundingDetails> fundingDetails = guarantor.getGuarantorFundDetails();
                for (GuarantorFundingDetails guarantorFundingDetails : fundingDetails) {
                    guarantorFundingDetails.undoAllTransactions();
                    guarantorFundingDetailList.add(guarantorFundingDetails);
                }
            }
            if (!guarantorFundingDetailList.isEmpty()) {
                loan.setGuaranteeAmount(null);
                this.guarantorFundingRepository.saveAll(guarantorFundingDetailList);
            }
        }
    }

    /**
     * Method holds all guarantor's guarantee amount for a loan account. example: hold funds on approval of loan
     * account. Fails with a validation error listing every savings account whose withdrawable
     * balance goes negative after the hold.
     */
    private void holdGuarantorFunds(final Loan loan) {
        if (loan.loanProduct().isHoldGuaranteeFunds()) {
            final List<Guarantor> existGuarantorList = this.guarantorRepository.findByLoan(loan);
            List<GuarantorFundingDetails> guarantorFundingDetailList = new ArrayList<>();
            List<DepositAccountOnHoldTransaction> onHoldTransactions = new ArrayList<>();
            BigDecimal totalGuarantee = BigDecimal.ZERO;
            List<Long> insufficientBalanceIds = new ArrayList<>();
            for (Guarantor guarantor : existGuarantorList) {
                final List<GuarantorFundingDetails> fundingDetails = guarantor.getGuarantorFundDetails();
                for (GuarantorFundingDetails guarantorFundingDetails : fundingDetails) {
                    if (guarantorFundingDetails.getStatus().isActive()) {
                        final SavingsAccount savingsAccount = guarantorFundingDetails.getLinkedSavingsAccount();
                        if (loan.isApproved() && !loan.isDisbursed()) {
                            // Recompute the savings summary using only transactions up to the
                            // approval date, so the balance check reflects the approval-date balance.
                            final List<SavingsAccountTransaction> transactions = new ArrayList<>();
                            for (final SavingsAccountTransaction transaction : savingsAccount.getTransactions()) {
                                if (!DateUtils.isAfter(transaction.getTransactionDate(), loan.getApprovedOnDate())) {
                                    transactions.add(transaction);
                                }
                            }
                            this.savingsAccountAssembler.setHelpers(savingsAccount);
                            savingsAccount.updateSavingsAccountSummary(transactions);
                        }
                        savingsAccount.holdFunds(guarantorFundingDetails.getAmount());
                        totalGuarantee = totalGuarantee.add(guarantorFundingDetails.getAmount());
                        DepositAccountOnHoldTransaction onHoldTransaction = DepositAccountOnHoldTransaction.hold(savingsAccount,
                                guarantorFundingDetails.getAmount(), loan.getApprovedOnDate());
                        onHoldTransactions.add(onHoldTransaction);
                        GuarantorFundingTransaction guarantorFundingTransaction = new GuarantorFundingTransaction(guarantorFundingDetails,
                                null, onHoldTransaction);
                        guarantorFundingDetails.addGuarantorFundingTransactions(guarantorFundingTransaction);
                        guarantorFundingDetailList.add(guarantorFundingDetails);
                        // Collect all failures first so the error can report every offending account.
                        if (savingsAccount.getWithdrawableBalance().compareTo(BigDecimal.ZERO) < 0) {
                            insufficientBalanceIds.add(savingsAccount.getId());
                        }
                        // Restore the summary over the full transaction list after the check.
                        savingsAccount.updateSavingsAccountSummary(savingsAccount.getTransactions());
                    }
                }
            }
            if (!insufficientBalanceIds.isEmpty()) {
                final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
                final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("loan.guarantor");
                baseDataValidator.reset().failWithCodeNoParameterAddedToErrorCode(GuarantorConstants.GUARANTOR_INSUFFICIENT_BALANCE_ERROR,
                        insufficientBalanceIds);
                throw new PlatformApiDataValidationException("validation.msg.validation.errors.exist", "Validation errors exist.",
                        dataValidationErrors);
            }
            loan.setGuaranteeAmount(totalGuarantee);
            if (!guarantorFundingDetailList.isEmpty()) {
                this.depositAccountOnHoldTransactionRepository.saveAll(onHoldTransactions);
                this.guarantorFundingRepository.saveAll(guarantorFundingDetailList);
            }
        }
    }

    /**
     * Method releases all guarantor's guarantee amount(first external guarantee and then self guarantee) for a loan
     * account in the portion of guarantee percentage on a paid principal. example: releases funds on repayments of loan
     * account. The released amount is the transaction's principal portion scaled by
     * guaranteeAmount/principal.
     */
    private void releaseGuarantorFunds(final LoanTransaction loanTransaction) {
        final Loan loan = loanTransaction.getLoan();
        if (loan.getGuaranteeAmount().compareTo(BigDecimal.ZERO) > 0) {
            final List<Guarantor> existGuarantorList = this.guarantorRepository.findByLoan(loan);
            List<GuarantorFundingDetails> externalGuarantorList = new ArrayList<>();
            List<GuarantorFundingDetails> selfGuarantorList = new ArrayList<>();
            BigDecimal selfGuarantee = BigDecimal.ZERO;
            BigDecimal guarantorGuarantee = BigDecimal.ZERO;
            for (Guarantor guarantor : existGuarantorList) {
                final List<GuarantorFundingDetails> fundingDetails = guarantor.getGuarantorFundDetails();
                for (GuarantorFundingDetails guarantorFundingDetails : fundingDetails) {
                    if (guarantorFundingDetails.getStatus().isActive()) {
                        if (guarantor.isSelfGuarantee()) {
                            selfGuarantorList.add(guarantorFundingDetails);
                            selfGuarantee = selfGuarantee.add(guarantorFundingDetails.getAmountRemaining());
                        } else if (guarantor.isExistingCustomer()) {
                            externalGuarantorList.add(guarantorFundingDetails);
                            guarantorGuarantee = guarantorGuarantee.add(guarantorFundingDetails.getAmountRemaining());
                        }
                    }
                }
            }
            BigDecimal amountForRelease = loanTransaction.getPrincipalPortion();
            BigDecimal totalGuaranteeAmount = loan.getGuaranteeAmount();
            BigDecimal principal = loan.getPrincipal().getAmount();
            if ((amountForRelease != null) && (totalGuaranteeAmount != null)) {
                amountForRelease = amountForRelease.multiply(totalGuaranteeAmount).divide(principal, MoneyHelper.getRoundingMode());
                List<DepositAccountOnHoldTransaction> accountOnHoldTransactions = new ArrayList<>();
                // External guarantors are released first; any remainder comes out of self guarantees.
                BigDecimal amountLeft = calculateAndRelaseGuarantorFunds(externalGuarantorList, guarantorGuarantee, amountForRelease,
                        loanTransaction, accountOnHoldTransactions);
                if (amountLeft.compareTo(BigDecimal.ZERO) > 0) {
                    calculateAndRelaseGuarantorFunds(selfGuarantorList, selfGuarantee, amountLeft, loanTransaction,
                            accountOnHoldTransactions);
                    externalGuarantorList.addAll(selfGuarantorList);
                }
                if (!externalGuarantorList.isEmpty()) {
                    this.depositAccountOnHoldTransactionRepository.saveAll(accountOnHoldTransactions);
                    this.guarantorFundingRepository.saveAll(externalGuarantorList);
                }
            }
        }
    }

    /**
     * Method releases all guarantor's guarantee amount. example: releases funds on write-off of a loan account.
     */
    private void releaseAllGuarantors(final LoanTransaction loanTransaction) {
        Loan loan = loanTransaction.getLoan();
        if (loan.getGuaranteeAmount().compareTo(BigDecimal.ZERO) > 0) {
            final List<Guarantor> existGuarantorList = this.guarantorRepository.findByLoan(loan);
            List<GuarantorFundingDetails> saveGuarantorFundingDetails = new ArrayList<>();
            List<DepositAccountOnHoldTransaction> onHoldTransactions = new ArrayList<>();
            for (Guarantor guarantor : existGuarantorList) {
                final List<GuarantorFundingDetails> fundingDetails = guarantor.getGuarantorFundDetails();
                for (GuarantorFundingDetails guarantorFundingDetails : fundingDetails) {
                    BigDecimal amoutForRelease = guarantorFundingDetails.getAmountRemaining();
                    if (amoutForRelease.compareTo(BigDecimal.ZERO) > 0 && guarantorFundingDetails.getStatus().isActive()) {
                        SavingsAccount savingsAccount = guarantorFundingDetails.getLinkedSavingsAccount();
                        savingsAccount.releaseFunds(amoutForRelease);
                        DepositAccountOnHoldTransaction onHoldTransaction = DepositAccountOnHoldTransaction.release(savingsAccount,
                                amoutForRelease, loanTransaction.getTransactionDate());
                        onHoldTransactions.add(onHoldTransaction);
                        GuarantorFundingTransaction guarantorFundingTransaction = new GuarantorFundingTransaction(guarantorFundingDetails,
                                loanTransaction, onHoldTransaction);
                        guarantorFundingDetails.addGuarantorFundingTransactions(guarantorFundingTransaction);
                        guarantorFundingDetails.releaseFunds(amoutForRelease);
                        saveGuarantorFundingDetails.add(guarantorFundingDetails);
                    }
                }
            }
            if (!saveGuarantorFundingDetails.isEmpty()) {
                this.depositAccountOnHoldTransactionRepository.saveAll(onHoldTransactions);
                this.guarantorFundingRepository.saveAll(saveGuarantorFundingDetails);
            }
        }
    }

    /**
     * Method releases guarantor's guarantee amount on transferring guarantee amount to loan account. example: on
     * recovery of guarantee funds from guarantor's. The funding detail to release is looked up
     * from {@link #releaseLoanIds}, which transferFundsFromGuarantor() populated before the
     * transfer triggered the repayment event that leads here.
     */
    private void completeGuarantorFund(final LoanTransaction loanTransaction) {
        Loan loan = loanTransaction.getLoan();
        GuarantorFundingDetails guarantorFundingDetails = this.guarantorFundingRepository.findById(releaseLoanIds.get(loan.getId()))
                .orElse(null);
        if (guarantorFundingDetails != null) {
            BigDecimal amountForRelease = loanTransaction.getAmount(loan.getCurrency()).getAmount();
            BigDecimal guarantorGuarantee = amountForRelease;
            List<GuarantorFundingDetails> guarantorList = Arrays.asList(guarantorFundingDetails);
            final List<DepositAccountOnHoldTransaction> accountOnHoldTransactions = new ArrayList<>();
            calculateAndRelaseGuarantorFunds(guarantorList, guarantorGuarantee, amountForRelease, loanTransaction,
                    accountOnHoldTransactions);
            this.depositAccountOnHoldTransactionRepository.saveAll(accountOnHoldTransactions);
            this.guarantorFundingRepository.saveAndFlush(guarantorFundingDetails);
        }
    }

    /**
     * Releases {@code amountForRelease} across the given funding details, each guarantor
     * contributing in proportion to its remaining amount over {@code totalGuaranteeAmount}
     * (capped at its own remaining amount). Release transactions are appended to
     * {@code accountOnHoldTransactions} for the caller to persist.
     *
     * @return the portion of {@code amountForRelease} that could not be released from this list
     */
    private BigDecimal calculateAndRelaseGuarantorFunds(List<GuarantorFundingDetails> guarantorList, BigDecimal totalGuaranteeAmount,
            BigDecimal amountForRelease, LoanTransaction loanTransaction,
            final List<DepositAccountOnHoldTransaction> accountOnHoldTransactions) {
        BigDecimal amountLeft = amountForRelease;
        for (GuarantorFundingDetails fundingDetails : guarantorList) {
            BigDecimal guarantorAmount = amountForRelease.multiply(fundingDetails.getAmountRemaining()).divide(totalGuaranteeAmount,
                    MoneyHelper.getRoundingMode());
            // Never release more than this guarantor still has on hold.
            if (fundingDetails.getAmountRemaining().compareTo(guarantorAmount) <= 0) {
                guarantorAmount = fundingDetails.getAmountRemaining();
            }
            fundingDetails.releaseFunds(guarantorAmount);
            SavingsAccount savingsAccount = fundingDetails.getLinkedSavingsAccount();
            savingsAccount.releaseFunds(guarantorAmount);
            DepositAccountOnHoldTransaction onHoldTransaction = DepositAccountOnHoldTransaction.release(savingsAccount, guarantorAmount,
                    loanTransaction.getTransactionDate());
            accountOnHoldTransactions.add(onHoldTransaction);
            GuarantorFundingTransaction guarantorFundingTransaction = new GuarantorFundingTransaction(fundingDetails, loanTransaction,
                    onHoldTransaction);
            fundingDetails.addGuarantorFundingTransactions(guarantorFundingTransaction);
            amountLeft = amountLeft.subtract(guarantorAmount);
        }
        return amountLeft;
    }

    /**
     * Method reverses the fund release transactions in case of loan transaction reversed.
     *
     * @param loanTransactionIds ids of the reversed loan transactions whose funding transactions must be undone
     */
    private void reverseTransaction(final List<Long> loanTransactionIds) {
        List<GuarantorFundingTransaction> fundingTransactions = this.guarantorFundingTransactionRepository
                .fetchGuarantorFundingTransactions(loanTransactionIds);
        for (GuarantorFundingTransaction fundingTransaction : fundingTransactions) {
            fundingTransaction.reverseTransaction();
        }
        if (!fundingTransactions.isEmpty()) {
            this.guarantorFundingTransactionRepository.saveAll(fundingTransactions);
        }
    }

    /** On loan approval: validate guarantee minimums before any funds are held. */
    private final class ValidateOnBusinessEvent implements BusinessEventListener<LoanApprovedBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanApprovedBusinessEvent event) {
            Loan loan = event.get();
            validateGuarantorBusinessRules(loan);
        }
    }

    /** On loan approval: place holds on each guarantor's linked savings account. */
    private final class HoldFundsOnBusinessEvent implements BusinessEventListener<LoanApprovedBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanApprovedBusinessEvent event) {
            Loan loan = event.get();
            holdGuarantorFunds(loan);
        }
    }

    /**
     * On repayment: release funds proportionally — unless the repayment was generated by a
     * guarantor-recovery transfer (loan id present in releaseLoanIds), in which case the
     * targeted funding detail is completed instead.
     */
    private final class ReleaseFundsOnBusinessEvent implements BusinessEventListener<LoanTransactionMakeRepaymentPostBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanTransactionMakeRepaymentPostBusinessEvent event) {
            LoanTransaction loanTransaction = event.get();
            if (releaseLoanIds.containsKey(loanTransaction.getLoan().getId())) {
                completeGuarantorFund(loanTransaction);
            } else {
                releaseGuarantorFunds(loanTransaction);
            }
        }
    }

    /** On undo write-off: reverse the funding transactions tied to the written-off transaction. */
    private final class ReverseFundsOnBusinessEvent implements BusinessEventListener<LoanUndoWrittenOffBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanUndoWrittenOffBusinessEvent event) {
            LoanTransaction loanTransaction = event.get();
            List<Long> reversedTransactions = new ArrayList<>();
            reversedTransactions.add(loanTransaction.getId());
            reverseTransaction(reversedTransactions);
        }
    }

    /**
     * On transaction adjustment: reverse the funding transactions of the adjusted transaction,
     * then re-apply the release for the replacement transaction, if any.
     */
    private final class AdjustFundsOnBusinessEvent implements BusinessEventListener<LoanAdjustTransactionBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanAdjustTransactionBusinessEvent event) {
            LoanAdjustTransactionBusinessEvent.Data eventData = event.get();
            LoanTransaction loanTransaction = eventData.getTransactionToAdjust();
            List<Long> reersedTransactions = new ArrayList<>(1);
            reersedTransactions.add(loanTransaction.getId());
            reverseTransaction(reersedTransactions);
            LoanTransaction newTransaction = event.get().getNewTransactionDetail();
            if (newTransaction != null) {
                releaseGuarantorFunds(newTransaction);
            }
        }
    }

    /** On undo disbursal: reverse funding transactions for every transaction of the loan. */
    private final class ReverseAllFundsOnBusinessEvent implements BusinessEventListener<LoanUndoDisbursalBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanUndoDisbursalBusinessEvent event) {
            Loan loan = event.get();
            List<Long> reversedTransactions = new ArrayList<>(loanTransactionRepository.findTransactionIdsByLoan(loan));
            reverseTransaction(reversedTransactions);
        }
    }

    /** On undo approval: undo every hold/release transaction and clear the guarantee amount. */
    private final class UndoAllFundTransactions implements BusinessEventListener<LoanUndoApprovalBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanUndoApprovalBusinessEvent event) {
            Loan loan = event.get();
            reverseAllFundTransaction(loan);
        }
    }

    /** On write-off: release all remaining held guarantee amounts. */
    private final class ReleaseAllFunds implements BusinessEventListener<LoanWrittenOffPostBusinessEvent> {

        @Override
        public void onBusinessEvent(LoanWrittenOffPostBusinessEvent event) {
            LoanTransaction loanTransaction = event.get();
            releaseAllGuarantors(loanTransaction);
        }
    }
}
|
apache/hadoop | 34,933 | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/diskbalancer/command/TestDiskBalancerCommand.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.hdfs.server.diskbalancer.command;
import static java.lang.Thread.sleep;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.CANCEL;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.EXECUTE;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.HELP;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.NODE;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.OUTFILE;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.PLAN;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.QUERY;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.REPORT;
import static org.apache.hadoop.hdfs.tools.DiskBalancerCLI.SKIPDATECHECK;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintStream;
import java.net.URI;
import java.util.List;
import java.util.Scanner;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerTestUtil;
import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ClusterConnector;
import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ConnectorFactory;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
/**
* Tests various CLI commands of DiskBalancer.
*/
public class TestDiskBalancerCommand {
private MiniDFSCluster cluster;
private URI clusterJson;
private Configuration conf = new HdfsConfiguration();
private final static int DEFAULT_BLOCK_SIZE = 1024;
private final static int FILE_LEN = 200 * 1024;
private final static long CAPCACITY = 300 * 1024;
private final static long[] CAPACITIES = new long[] {CAPCACITY, CAPCACITY};
  @BeforeEach
  public void setUp() throws Exception {
    // Enable the disk balancer so datanodes accept CLI commands.
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3)
        .storagesPerDatanode(2).build();
    cluster.waitActive();
    // Static cluster description used by the report/plan tests that do not
    // need a live cluster.
    clusterJson = getClass().getResource(
        "/diskBalancer/data-cluster-64node-3disk.json").toURI();
  }
@AfterEach
public void tearDown() throws Exception {
if (cluster != null) {
// Just make sure we can shutdown datanodes.
for (int i = 0; i < cluster.getDataNodes().size(); i++) {
cluster.getDataNodes().get(i).shutdown();
}
cluster.shutdown();
}
}
/**
* Tests if it's allowed to submit and execute plan when Datanode is in status
* other than REGULAR.
*/
@Test
@Timeout(value = 60)
public void testSubmitPlanInNonRegularStatus() throws Exception {
final int numDatanodes = 1;
MiniDFSCluster miniCluster = null;
final Configuration hdfsConf = new HdfsConfiguration();
try {
/* new cluster with imbalanced capacity */
miniCluster = DiskBalancerTestUtil.newImbalancedCluster(
hdfsConf,
numDatanodes,
CAPACITIES,
DEFAULT_BLOCK_SIZE,
FILE_LEN,
StartupOption.ROLLBACK);
/* get full path of plan */
final String planFileFullName = runAndVerifyPlan(miniCluster, hdfsConf);
try {
/* run execute command */
final String cmdLine = String.format(
"hdfs diskbalancer -%s %s",
EXECUTE,
planFileFullName);
runCommand(cmdLine, hdfsConf, miniCluster);
} catch(RemoteException e) {
assertThat(e.getClassName()).contains("DiskBalancerException");
assertThat(e.toString()).contains("Datanode is in special state")
.contains("Disk balancing not permitted.");
}
} finally {
if (miniCluster != null) {
miniCluster.shutdown();
}
}
}
  /**
   * Tests running multiple commands under one setup. This mainly covers
   * {@link org.apache.hadoop.hdfs.server.diskbalancer.command.Command#close}
   */
  @Test
  @Timeout(value = 120)
  public void testRunMultipleCommandsUnderOneSetup() throws Exception {
    final int numDatanodes = 1;
    MiniDFSCluster miniCluster = null;
    final Configuration hdfsConf = new HdfsConfiguration();
    try {
      /* new cluster with imbalanced capacity */
      miniCluster = DiskBalancerTestUtil.newImbalancedCluster(
          hdfsConf,
          numDatanodes,
          CAPACITIES,
          DEFAULT_BLOCK_SIZE,
          FILE_LEN);
      /* get full path of plan */
      final String planFileFullName = runAndVerifyPlan(miniCluster, hdfsConf);
      /* run execute command against the same setup that ran the plan */
      final String cmdLine = String.format(
          "hdfs diskbalancer -%s %s",
          EXECUTE,
          planFileFullName);
      runCommand(cmdLine, hdfsConf, miniCluster);
    } finally {
      if (miniCluster != null) {
        miniCluster.shutdown();
      }
    }
  }
  /**
   * Executing a plan under a 0-day validity window must be rejected:
   * every plan is considered expired immediately.
   */
  @Test
  @Timeout(value = 600)
  public void testDiskBalancerExecuteOptionPlanValidityWithException() throws
      Exception {
    final int numDatanodes = 1;
    final Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    hdfsConf.set(DFSConfigKeys.DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, "0d");
    /* new cluster with imbalanced capacity */
    final MiniDFSCluster miniCluster = DiskBalancerTestUtil.
        newImbalancedCluster(
            hdfsConf,
            numDatanodes,
            CAPACITIES,
            DEFAULT_BLOCK_SIZE,
            FILE_LEN);
    try {
      /* get full path of plan */
      final String planFileFullName = runAndVerifyPlan(miniCluster, hdfsConf);
      /* run execute command; it must fail with an expired-plan error */
      final String cmdLine = String.format(
          "hdfs diskbalancer -%s %s",
          EXECUTE,
          planFileFullName);
      LambdaTestUtils.intercept(
          RemoteException.class,
          "DiskBalancerException",
          "Plan was generated more than 0d ago",
          () -> {
            runCommand(cmdLine, hdfsConf, miniCluster);
          });
    } finally{
      if (miniCluster != null) {
        miniCluster.shutdown();
      }
    }
  }
  /**
   * Same as above but with a unit-less validity value "0", which the server
   * reports back in milliseconds ("0ms").
   */
  @Test
  @Timeout(value = 600)
  public void testDiskBalancerExecutePlanValidityWithOutUnitException()
      throws
      Exception {
    final int numDatanodes = 1;
    final Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    hdfsConf.set(DFSConfigKeys.DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, "0");
    /* new cluster with imbalanced capacity */
    final MiniDFSCluster miniCluster = DiskBalancerTestUtil.
        newImbalancedCluster(
            hdfsConf,
            numDatanodes,
            CAPACITIES,
            DEFAULT_BLOCK_SIZE,
            FILE_LEN);
    try {
      /* get full path of plan */
      final String planFileFullName = runAndVerifyPlan(miniCluster, hdfsConf);
      /* run execute command; expired plan must be rejected */
      final String cmdLine = String.format(
          "hdfs diskbalancer -%s %s",
          EXECUTE,
          planFileFullName);
      LambdaTestUtils.intercept(
          RemoteException.class,
          "DiskBalancerException",
          "Plan was generated more than 0ms ago",
          () -> {
            runCommand(cmdLine, hdfsConf, miniCluster);
          });
    } finally{
      if (miniCluster != null) {
        miniCluster.shutdown();
      }
    }
  }
  /**
   * With -skipDateCheck the execute command must succeed even though the
   * 0d validity window would normally reject the plan as expired.
   */
  @Test
  @Timeout(value = 600)
  public void testDiskBalancerForceExecute() throws
      Exception {
    final int numDatanodes = 1;
    final Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    hdfsConf.set(DFSConfigKeys.DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, "0d");
    /* new cluster with imbalanced capacity */
    final MiniDFSCluster miniCluster = DiskBalancerTestUtil.
        newImbalancedCluster(
            hdfsConf,
            numDatanodes,
            CAPACITIES,
            DEFAULT_BLOCK_SIZE,
            FILE_LEN);
    try {
      /* get full path of plan */
      final String planFileFullName = runAndVerifyPlan(miniCluster, hdfsConf);
      /* run execute command */
      final String cmdLine = String.format(
          "hdfs diskbalancer -%s %s -%s",
          EXECUTE,
          planFileFullName,
          SKIPDATECHECK);
      // Disk Balancer should execute the plan, as skipDateCheck Option is
      // specified
      runCommand(cmdLine, hdfsConf, miniCluster);
    } finally{
      if (miniCluster != null) {
        miniCluster.shutdown();
      }
    }
  }
  /**
   * A plan executed within its validity window (600s) must be accepted,
   * even after a short delay.
   */
  @Test
  @Timeout(value = 600)
  public void testDiskBalancerExecuteOptionPlanValidity() throws Exception {
    final int numDatanodes = 1;
    final Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    hdfsConf.set(DFSConfigKeys.DFS_DISK_BALANCER_PLAN_VALID_INTERVAL, "600s");
    /* new cluster with imbalanced capacity */
    final MiniDFSCluster miniCluster = DiskBalancerTestUtil.
        newImbalancedCluster(
            hdfsConf,
            numDatanodes,
            CAPACITIES,
            DEFAULT_BLOCK_SIZE,
            FILE_LEN);
    try {
      /* get full path of plan */
      final String planFileFullName = runAndVerifyPlan(miniCluster, hdfsConf);
      /* run execute command */
      final String cmdLine = String.format(
          "hdfs diskbalancer -%s %s",
          EXECUTE,
          planFileFullName);
      // Plan is valid for 600 seconds, sleeping for 10seconds, so now
      // diskbalancer should execute the plan
      sleep(10000);
      runCommand(cmdLine, hdfsConf, miniCluster);
    } finally{
      if (miniCluster != null) {
        miniCluster.shutdown();
      }
    }
  }
private String runAndVerifyPlan(
final MiniDFSCluster miniCluster,
final Configuration hdfsConf) throws Exception {
String cmdLine = "";
List<String> outputs = null;
final DataNode dn = miniCluster.getDataNodes().get(0);
/* run plan command */
cmdLine = String.format(
"hdfs diskbalancer -%s %s",
PLAN,
dn.getDatanodeUuid());
outputs = runCommand(cmdLine, hdfsConf, miniCluster);
/* get path of plan file*/
final String planFileName = dn.getDatanodeUuid();
/* verify plan command */
assertEquals(2, outputs.size(), "There must be two lines: the 1st is writing plan to...,"
+ " the 2nd is actual full path of plan file.");
assertThat(outputs.get(1)).contains(planFileName);
/* get full path of plan file*/
final String planFileFullName = outputs.get(1);
return planFileFullName;
}
/* test exception on invalid arguments */
@Test
@Timeout(value = 60)
public void testExceptionOnInvalidArguments() throws Exception {
final String cmdLine = "hdfs diskbalancer random1 -report random2 random3";
HadoopIllegalArgumentException ex = assertThrows(HadoopIllegalArgumentException.class, () -> {
runCommand(cmdLine);
});
assertTrue(ex.getMessage().contains(
"Invalid or extra Arguments: [random1, random2, random3]"));
}
  /* test basic report against the canned 64-node JSON cluster; with no -top
     the default limit of 100 is applied and all 64 nodes are reported */
  @Test
  @Timeout(value = 60)
  public void testReportSimple() throws Exception {
    final String cmdLine = "hdfs diskbalancer -report";
    final List<String> outputs = runCommand(cmdLine);
    assertThat(
        outputs.get(0)).
        contains("Processing report command");
    assertThat(
        outputs.get(1))
        .contains("No top limit specified")
        .contains("using default top value")
        .contains("100");
    assertThat(
        outputs.get(2))
        .contains("Reporting top")
        .contains("64")
        .contains("DataNode(s) benefiting from running DiskBalancer");
    // Spot-check a known node from the canned topology (line 32 of output).
    assertThat(
        outputs.get(32))
        .contains("30/64 null[null:0]")
        .contains("a87654a9-54c7-4693-8dd9-c9c7021dc340")
        .contains("9 volumes with node data density 1.97");
  }
/* test basic report with negative top limit */
@Test
@Timeout(value = 60)
public void testReportWithNegativeTopLimit()
throws Exception {
final String cmdLine = "hdfs diskbalancer -report -top -32";
IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, () -> {
runCommand(cmdLine);
});
assertTrue(ex.getMessage().contains("Top limit input should be a positive numeric value"));
}
  /* test less than 64 DataNode(s) as total, e.g., -report -top 32 */
  @Test
  @Timeout(value = 60)
  public void testReportLessThanTotal() throws Exception {
    final String cmdLine = "hdfs diskbalancer -report -top 32";
    final List<String> outputs = runCommand(cmdLine);
    assertThat(
        outputs.get(0))
        .contains("Processing report command");
    assertThat(
        outputs.get(1))
        .contains(
            "Reporting top",
            "32",
            "DataNode(s) benefiting from running DiskBalancer"
        );
    // With only 32 nodes reported, the known node appears one line earlier
    // than in the unlimited report (index 31 instead of 32).
    assertThat(outputs.get(31))
        .contains(
            "30/32 null[null:0]",
            "a87654a9-54c7-4693-8dd9-c9c7021dc340",
            "9 volumes with node data density 1.97"
        );
  }
/**
* This test simulates DiskBalancerCLI Report command run from a shell
* with a generic option 'fs'.
* @throws Exception
*/
@Test
@Timeout(value = 60)
public void testReportWithGenericOptionFS() throws Exception {
final String topReportArg = "5";
final String reportArgs = String.format("-%s file:%s -%s -%s %s",
"fs", clusterJson.getPath(),
REPORT, "top", topReportArg);
final String cmdLine = String.format("%s", reportArgs);
final List<String> outputs = runCommand(cmdLine);
assertThat(outputs.get(0)).contains("Processing report command");
assertThat(outputs.get(1))
.contains(
"Reporting top",
topReportArg,
"DataNode(s) benefiting from running DiskBalancer");
}
  /* test more than 64 DataNode(s) as total, e.g., -report -top 128:
     the limit is clamped to the 64 nodes that actually exist */
  @Test
  @Timeout(value = 60)
  public void testReportMoreThanTotal() throws Exception {
    final String cmdLine = "hdfs diskbalancer -report -top 128";
    final List<String> outputs = runCommand(cmdLine);
    assertThat(
        outputs.get(0)).contains("Processing report command");
    assertThat(outputs.get(1))
        .contains(
            "Reporting top",
            "64",
            "DataNode(s) benefiting from running DiskBalancer"
        );
    assertThat(outputs.get(31))
        .contains(
            "30/64 null[null:0]",
            "a87654a9-54c7-4693-8dd9-c9c7021dc340",
            "9 volumes with node data density 1.97"
        );
  }
  /* test invalid top limit, e.g., -report -top xx:
     a non-numeric value falls back to the default of 100 */
  @Test
  @Timeout(value = 60)
  public void testReportInvalidTopLimit() throws Exception {
    final String cmdLine = "hdfs diskbalancer -report -top xx";
    final List<String> outputs = runCommand(cmdLine);
    assertThat(
        outputs.get(0)).contains("Processing report command");
    assertThat(outputs.get(1))
        .contains(
            "Top limit input is not numeric",
            "using default top value",
            "100"
        );
    assertThat(outputs.get(2))
        .contains(
            "Reporting top",
            "64",
            "DataNode(s) benefiting from running DiskBalancer"
        );
    assertThat(outputs.get(32))
        .contains(
            "30/64 null[null:0]",
            "a87654a9-54c7-4693-8dd9-c9c7021dc340",
            "9 volumes with node data density 1.97"
        );
  }
  /**
   * Per-node report against the canned JSON cluster: verifies the header,
   * the node summary line, and then each of the node's nine volumes
   * (3x DISK, 3x RAM_DISK, 3x SSD) with their golden used/free figures.
   */
  @Test
  @Timeout(value = 60)
  public void testReportNode() throws Exception {
    final String cmdLine =
        "hdfs diskbalancer -report -node " +
            "a87654a9-54c7-4693-8dd9-c9c7021dc340";
    final List<String> outputs = runCommand(cmdLine);
    assertThat(
        outputs.get(0)).contains("Processing report command");
    assertThat(outputs.get(1))
        .contains(
            "Reporting volume information for DataNode",
            "a87654a9-54c7-4693-8dd9-c9c7021dc340"
        );
    assertThat(outputs.get(2))
        .contains(
            "null[null:0]",
            "a87654a9-54c7-4693-8dd9-c9c7021dc340",
            "9 volumes with node data density 1.97"
        );
    assertThat(outputs.get(3))
        .contains(
            "DISK",
            "/tmp/disk/KmHefYNURo",
            "0.20 used: 39160240782/200000000000",
            "0.80 free: 160839759218/200000000000"
        );
    assertThat(outputs.get(4))
        .contains(
            "DISK",
            "/tmp/disk/Mxfcfmb24Y",
            "0.92 used: 733099315216/800000000000",
            "0.08 free: 66900684784/800000000000"
        );
    assertThat(outputs.get(5))
        .contains(
            "DISK",
            "/tmp/disk/xx3j3ph3zd",
            "0.72 used: 289544224916/400000000000",
            "0.28 free: 110455775084/400000000000"
        );
    assertThat(outputs.get(6))
        .contains(
            "RAM_DISK",
            "/tmp/disk/BoBlQFxhfw",
            "0.60 used: 477590453390/800000000000",
            "0.40 free: 322409546610/800000000000"
        );
    assertThat(outputs.get(7))
        .contains(
            "RAM_DISK",
            "/tmp/disk/DtmAygEU6f",
            "0.34 used: 134602910470/400000000000",
            "0.66 free: 265397089530/400000000000"
        );
    assertThat(outputs.get(8))
        .contains(
            "RAM_DISK",
            "/tmp/disk/MXRyYsCz3U",
            "0.55 used: 438102096853/800000000000",
            "0.45 free: 361897903147/800000000000"
        );
    assertThat(outputs.get(9))
        .contains(
            "SSD",
            "/tmp/disk/BGe09Y77dI",
            "0.89 used: 890446265501/1000000000000",
            "0.11 free: 109553734499/1000000000000"
        );
    assertThat(outputs.get(10))
        .contains(
            "SSD",
            "/tmp/disk/JX3H8iHggM",
            "0.31 used: 2782614512957/9000000000000",
            "0.69 free: 6217385487043/9000000000000"
        );
    assertThat(outputs.get(11))
        .contains(
            "SSD",
            "/tmp/disk/uLOYmVZfWV",
            "0.75 used: 1509592146007/2000000000000",
            "0.25 free: 490407853993/2000000000000"
        );
  }
  /**
   * Per-node report against the LIVE mini-cluster (no canned JSON):
   * a freshly started datanode has two empty storages, so density is 0.00
   * and each volume is 0.00 used / 1.00 free.
   */
  @Test
  @Timeout(value = 60)
  public void testReportNodeWithoutJson() throws Exception {
    String dataNodeUuid = cluster.getDataNodes().get(0).getDatanodeUuid();
    final String planArg = String.format("-%s -%s %s",
        REPORT, NODE, dataNodeUuid);
    final String cmdLine = String
        .format(
            "hdfs diskbalancer %s", planArg);
    List<String> outputs = runCommand(cmdLine, cluster);
    assertThat(
        outputs.get(0)).contains("Processing report command");
    assertThat(outputs.get(1))
        .contains(
            "Reporting volume information for DataNode",
            dataNodeUuid
        );
    assertThat(outputs.get(2))
        .contains(
            dataNodeUuid,
            "2 volumes with node data density 0.00"
        );
    assertThat(outputs.get(3))
        .contains(
            "DISK",
            new Path(cluster.getInstanceStorageDir(0, 0).getAbsolutePath()).toString(),
            "0.00",
            "1.00"
        );
    assertThat(outputs.get(4))
        .contains(
            "DISK",
            new Path(cluster.getInstanceStorageDir(0, 1).getAbsolutePath()).toString(),
            "0.00",
            "1.00"
        );
  }
@Test
@Timeout(value = 60)
public void testReadClusterFromJson() throws Exception {
ClusterConnector jsonConnector = ConnectorFactory.getCluster(clusterJson,
conf);
DiskBalancerCluster diskBalancerCluster = new DiskBalancerCluster(
jsonConnector);
diskBalancerCluster.readClusterInfo();
assertEquals(64, diskBalancerCluster.getNodes().size());
}
/* test -plan DataNodeID */
@Test
@Timeout(value = 60)
public void testPlanNode() throws Exception {
final String planArg = String.format("-%s %s", PLAN,
cluster.getDataNodes().get(0).getDatanodeUuid());
final String cmdLine = String
.format(
"hdfs diskbalancer %s", planArg);
runCommand(cmdLine, cluster);
}
  /* test -plan DataNodeID against the canned JSON cluster, writing the
     plan into a per-test output directory via -out */
  @Test
  @Timeout(value = 60)
  public void testPlanJsonNode() throws Exception {
    final String planArg = String.format("-%s %s", PLAN,
        "a87654a9-54c7-4693-8dd9-c9c7021dc340");
    final Path testPath = new Path(
        PathUtils.getTestPath(getClass()),
        GenericTestUtils.getMethodName());
    final String cmdLine = String
        .format(
            "hdfs diskbalancer -out %s %s", testPath, planArg);
    runCommand(cmdLine);
  }
/* Test that illegal arguments are handled correctly*/
@Test
@Timeout(value = 60)
public void testIllegalArgument() throws Exception {
final String planArg = String.format("-%s %s", PLAN,
"a87654a9-54c7-4693-8dd9-c9c7021dc340");
final String cmdLine = String
.format(
"hdfs diskbalancer %s -report", planArg);
// -plan and -report cannot be used together.
// tests the validate command line arguments function.
assertThrows(java.lang.IllegalArgumentException.class, () -> {
runCommand(cmdLine);
});
}
@Test
@Timeout(value = 60)
public void testCancelCommand() throws Exception {
final String cancelArg = String.format("-%s %s", CANCEL, "nosuchplan");
final String nodeArg = String.format("-%s %s", NODE,
cluster.getDataNodes().get(0).getDatanodeUuid());
// Port:Host format is expected. So cancel command will throw.
assertThrows(java.lang.IllegalArgumentException.class, () -> {
final String cmdLine = String
.format(
"hdfs diskbalancer %s %s", cancelArg, nodeArg);
runCommand(cmdLine);
});
}
  /*
   Makes an invalid query attempt to non-existent Datanode: the datanode
   UUID is not a resolvable host, so the RPC layer fails to resolve it.
   */
  @Test
  @Timeout(value = 60)
  public void testQueryCommand() throws Exception {
    final String queryArg = String.format("-%s %s", QUERY,
        cluster.getDataNodes().get(0).getDatanodeUuid());
    assertThrows(java.net.UnknownHostException.class, () -> {
      final String cmdLine = String
          .format(
              "hdfs diskbalancer %s", queryArg);
      runCommand(cmdLine);
    });
  }
@Test
@Timeout(value = 60)
public void testHelpCommand() throws Exception {
final String helpArg = String.format("-%s", HELP);
final String cmdLine = String
.format(
"hdfs diskbalancer %s", helpArg);
runCommand(cmdLine);
}
  /**
   * Verifies that the plan command prints the full (absolute) path of the
   * generated plan file when -out is supplied.
   */
  @Test
  public void testPrintFullPathOfPlan()
      throws Exception {
    String parent = GenericTestUtils.getRandomizedTempPath();
    MiniDFSCluster miniCluster = null;
    try {
      Configuration hdfsConf = new HdfsConfiguration();
      List<String> outputs = null;
      /* new cluster with imbalanced capacity */
      miniCluster = DiskBalancerTestUtil.newImbalancedCluster(
          hdfsConf,
          1,
          CAPACITIES,
          DEFAULT_BLOCK_SIZE,
          FILE_LEN);
      /* run plan command */
      final String cmdLine = String.format(
          "hdfs diskbalancer -%s %s -%s %s",
          PLAN,
          miniCluster.getDataNodes().get(0).getDatanodeUuid(),
          OUTFILE,
          parent);
      outputs = runCommand(cmdLine, hdfsConf, miniCluster);
      /* expected full path: <outdir>/<datanode-uuid> */
      final String planFileFullName = new Path(
          parent,
          miniCluster.getDataNodes().get(0).getDatanodeUuid()).toString();
      /* verify the path of plan */
      assertEquals(2, outputs.size(), "There must be two lines: the 1st is writing plan to,"
          + " the 2nd is actual full path of plan file.");
      assertThat(outputs.get(0)).contains("Writing plan to");
      assertThat(outputs.get(1)).contains(planFileFullName);
    } finally {
      if (miniCluster != null) {
        miniCluster.shutdown();
      }
    }
  }
private List<String> runCommandInternal(
final String cmdLine,
final Configuration clusterConf) throws Exception {
String[] cmds = StringUtils.split(cmdLine, ' ');
ByteArrayOutputStream bufOut = new ByteArrayOutputStream();
PrintStream out = new PrintStream(bufOut);
Tool diskBalancerTool = new DiskBalancerCLI(clusterConf, out);
ToolRunner.run(clusterConf, diskBalancerTool, cmds);
Scanner scanner = new Scanner(bufOut.toString());
List<String> outputs = Lists.newArrayList();
while (scanner.hasNextLine()) {
outputs.add(scanner.nextLine());
}
return outputs;
}
  // Runs the CLI against the test's default configuration.
  private List<String> runCommandInternal(final String cmdLine)
      throws Exception {
    return runCommandInternal(cmdLine, conf);
  }
  // Runs the CLI against the canned JSON cluster description.
  private List<String> runCommand(final String cmdLine) throws Exception {
    FileSystem.setDefaultUri(conf, clusterJson);
    return runCommandInternal(cmdLine);
  }
  // Runs the CLI against the given live mini-cluster (default conf).
  private List<String> runCommand(final String cmdLine,
      MiniDFSCluster miniCluster) throws Exception {
    FileSystem.setDefaultUri(conf, miniCluster.getURI());
    return runCommandInternal(cmdLine);
  }
  // Runs the CLI against the given live mini-cluster with an explicit conf.
  private List<String> runCommand(
      final String cmdLine,
      Configuration clusterConf,
      MiniDFSCluster miniCluster) throws Exception {
    FileSystem.setDefaultUri(clusterConf, miniCluster.getURI());
    return runCommandInternal(cmdLine, clusterConf);
  }
  /**
   * Making sure that we can query the multiple nodes without having done a submit.
   * @throws Exception on cluster or CLI failure
   */
  @Test
  public void testDiskBalancerQueryWithoutSubmitAndMultipleNodes() throws Exception {
    Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int numDatanodes = 2;
    File basedir = new File(GenericTestUtils.getRandomizedTempPath());
    MiniDFSCluster miniDFSCluster = new MiniDFSCluster.Builder(hdfsConf, basedir)
        .numDataNodes(numDatanodes).build();
    try {
      miniDFSCluster.waitActive();
      DataNode dataNode1 = miniDFSCluster.getDataNodes().get(0);
      DataNode dataNode2 = miniDFSCluster.getDataNodes().get(1);
      final String queryArg = String.format("-query localhost:%d,localhost:%d", dataNode1
          .getIpcPort(), dataNode2.getIpcPort());
      final String cmdLine = String.format("hdfs diskbalancer %s", queryArg);
      List<String> outputs = runCommand(cmdLine);
      assertEquals(12, outputs.size());
      // Output order of the two nodes is unspecified, so each node's line
      // may appear in either of the two per-node sections (lines 1 and 6).
      assertTrue(
          outputs.get(1).contains("localhost:" + dataNode1.getIpcPort())
              || outputs.get(6).contains("localhost:" + dataNode1.getIpcPort()),
          "Expected outputs: " + outputs);
      assertTrue(
          outputs.get(1).contains("localhost:" + dataNode2.getIpcPort())
              || outputs.get(6).contains("localhost:" + dataNode2.getIpcPort()),
          "Expected outputs: " + outputs);
    } finally {
      miniDFSCluster.shutdown();
    }
  }
  /**
   * Making sure that we can query the node without having done a submit.
   * @throws Exception on cluster or CLI failure
   */
  @Test
  public void testDiskBalancerQueryWithoutSubmit() throws Exception {
    Configuration hdfsConf = new HdfsConfiguration();
    hdfsConf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int numDatanodes = 2;
    File basedir = new File(GenericTestUtils.getRandomizedTempPath());
    MiniDFSCluster miniDFSCluster = new MiniDFSCluster.Builder(hdfsConf, basedir)
        .numDataNodes(numDatanodes).build();
    try {
      miniDFSCluster.waitActive();
      DataNode dataNode = miniDFSCluster.getDataNodes().get(0);
      final String queryArg = String.format("-query localhost:%d", dataNode
          .getIpcPort());
      final String cmdLine = String.format("hdfs diskbalancer %s",
          queryArg);
      runCommand(cmdLine);
    } finally {
      miniDFSCluster.shutdown();
    }
  }
@Test
@Timeout(value = 60)
public void testGetNodeList() throws Exception {
ClusterConnector jsonConnector =
ConnectorFactory.getCluster(clusterJson, conf);
DiskBalancerCluster diskBalancerCluster =
new DiskBalancerCluster(jsonConnector);
diskBalancerCluster.readClusterInfo();
int nodeNum = 5;
StringBuilder listArg = new StringBuilder();
for (int i = 0; i < nodeNum; i++) {
listArg.append(diskBalancerCluster.getNodes().get(i).getDataNodeUUID())
.append(",");
}
ReportCommand command = new ReportCommand(conf, null);
command.setCluster(diskBalancerCluster);
List<DiskBalancerDataNode> nodeList = command.getNodes(listArg.toString());
assertEquals(nodeNum, nodeList.size());
}
  /** Report for two live datanodes addressed via a comma-separated -node list. */
  @Test
  @Timeout(value = 60)
  public void testReportCommandWithMultipleNodes() throws Exception {
    String dataNodeUuid1 = cluster.getDataNodes().get(0).getDatanodeUuid();
    String dataNodeUuid2 = cluster.getDataNodes().get(1).getDatanodeUuid();
    final String planArg = String.format("-%s -%s %s,%s",
        REPORT, NODE, dataNodeUuid1, dataNodeUuid2);
    final String cmdLine = String.format("hdfs diskbalancer %s", planArg);
    List<String> outputs = runCommand(cmdLine, cluster);
    verifyOutputsOfReportCommand(outputs, dataNodeUuid1, dataNodeUuid2, true);
  }
  /**
   * Shared verification for two-node report output.
   * @param inputNodesStr whether line 1 echoes the node list (true when the
   *        nodes were passed on the command line, false for a host file)
   */
  private void verifyOutputsOfReportCommand(List<String> outputs,
      String dataNodeUuid1, String dataNodeUuid2, boolean inputNodesStr) {
    assertThat(outputs.get(0)).contains("Processing report command");
    if (inputNodesStr) {
      assertThat(outputs.get(1)).contains("Reporting volume information for DataNode")
          .contains(dataNodeUuid1, dataNodeUuid2);
    }
    // Since the order of input nodes will be disrupted when parse
    // the node string, we should compare UUID with both output lines.
    assertTrue(outputs.get(2).contains(dataNodeUuid1)
        || outputs.get(6).contains(dataNodeUuid1));
    assertTrue(outputs.get(2).contains(dataNodeUuid2)
        || outputs.get(6).contains(dataNodeUuid2));
  }
  /** An unknown node name in the -node list must yield a not-found message. */
  @Test
  @Timeout(value = 60)
  public void testReportCommandWithInvalidNode() throws Exception {
    String dataNodeUuid1 = cluster.getDataNodes().get(0).getDatanodeUuid();
    String invalidNode = "invalidNode";
    final String planArg = String.format("-%s -%s %s,%s",
        REPORT, NODE, dataNodeUuid1, invalidNode);
    final String cmdLine = String.format("hdfs diskbalancer %s", planArg);
    List<String> outputs = runCommand(cmdLine, cluster);
    assertThat(
        outputs.get(0)).contains("Processing report command");
    assertThat(
        outputs.get(1)).contains("Reporting volume information for DataNode",
        dataNodeUuid1, invalidNode);
    String invalidNodeInfo =
        String.format("The node(s) '%s' not found. "
            + "Please make sure that '%s' exists in the cluster.", invalidNode, invalidNode);
    assertTrue(outputs.get(2).contains(invalidNodeInfo));
  }
  /** An empty -node list (just a comma) must be reported as zero nodes. */
  @Test
  @Timeout(value = 60)
  public void testReportCommandWithNullNodes() throws Exception {
    // don't input nodes
    final String planArg = String.format("-%s -%s ,", REPORT, NODE);
    final String cmdLine = String.format("hdfs diskbalancer %s", planArg);
    List<String> outputs = runCommand(cmdLine, cluster);
    String invalidNodeInfo = "The number of input nodes is 0. "
        + "Please input the valid nodes.";
    assertTrue(outputs.get(2).contains(invalidNodeInfo));
  }
@Test
@Timeout(value = 60)
public void testReportCommandWithReadingHostFile() throws Exception {
final String testDir = GenericTestUtils.getTestDir().getAbsolutePath();
File includeFile = new File(testDir, "diskbalancer.include");
String filePath = testDir + "/diskbalancer.include";
String dataNodeUuid1 = cluster.getDataNodes().get(0).getDatanodeUuid();
String dataNodeUuid2 = cluster.getDataNodes().get(1).getDatanodeUuid();
FileWriter fw = new FileWriter(filePath);
fw.write("#This-is-comment\n");
fw.write(dataNodeUuid1 + "\n");
fw.write(dataNodeUuid2 + "\n");
fw.close();
final String planArg = String.format("-%s -%s file://%s",
REPORT, NODE, filePath);
final String cmdLine = String.format("hdfs diskbalancer %s", planArg);
List<String> outputs = runCommand(cmdLine, cluster);
verifyOutputsOfReportCommand(outputs, dataNodeUuid1, dataNodeUuid2, false);
includeFile.delete();
}
  /** A non-existent host file path must produce an invalid-path message. */
  @Test
  @Timeout(value = 60)
  public void testReportCommandWithInvalidHostFilePath() throws Exception {
    final String testDir = GenericTestUtils.getTestDir().getAbsolutePath();
    String invalidFilePath = testDir + "/diskbalancer-invalid.include";
    final String planArg = String.format("-%s -%s file://%s",
        REPORT, NODE, invalidFilePath);
    final String cmdLine = String.format("hdfs diskbalancer %s", planArg);
    List<String> outputs = runCommand(cmdLine, cluster);
    String invalidNodeInfo = String.format(
        "The input host file path 'file://%s' is not a valid path.", invalidFilePath);
    assertTrue(outputs.get(2).contains(invalidNodeInfo));
  }
}
|
oracle/graal | 35,486 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/lir/gen/LIRGenerator.java | /*
* Copyright (c) 2009, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.lir.gen;
import static jdk.graal.compiler.core.common.GraalOptions.AlignJumpTableEntry;
import static jdk.graal.compiler.core.common.GraalOptions.LoopHeaderAlignment;
import static jdk.vm.ci.code.ValueUtil.asAllocatableValue;
import static jdk.vm.ci.code.ValueUtil.asStackSlot;
import static jdk.vm.ci.code.ValueUtil.isAllocatableValue;
import static jdk.vm.ci.code.ValueUtil.isIllegal;
import static jdk.vm.ci.code.ValueUtil.isLegal;
import static jdk.vm.ci.code.ValueUtil.isRegister;
import static jdk.vm.ci.code.ValueUtil.isStackSlot;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import jdk.graal.compiler.asm.Label;
import jdk.graal.compiler.core.common.LIRKind;
import jdk.graal.compiler.core.common.calc.Condition;
import jdk.graal.compiler.core.common.cfg.BasicBlock;
import jdk.graal.compiler.core.common.spi.ForeignCallLinkage;
import jdk.graal.compiler.core.common.spi.LIRKindTool;
import jdk.graal.compiler.core.common.type.Stamp;
import jdk.graal.compiler.debug.DebugCloseable;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.debug.TTY;
import jdk.graal.compiler.graph.NodeSourcePosition;
import jdk.graal.compiler.lir.ConstantValue;
import jdk.graal.compiler.lir.LIR;
import jdk.graal.compiler.lir.LIRFrameState;
import jdk.graal.compiler.lir.LIRInstruction;
import jdk.graal.compiler.lir.LIRValueUtil;
import jdk.graal.compiler.lir.LabelRef;
import jdk.graal.compiler.lir.StandardOp;
import jdk.graal.compiler.lir.SwitchStrategy;
import jdk.graal.compiler.lir.Variable;
import jdk.graal.compiler.lir.hashing.IntHasher;
import jdk.graal.compiler.nodes.spi.CoreProviders;
import jdk.graal.compiler.nodes.spi.CoreProvidersDelegate;
import jdk.graal.compiler.options.Option;
import jdk.graal.compiler.options.OptionKey;
import jdk.graal.compiler.options.OptionType;
import jdk.graal.compiler.options.OptionValues;
import jdk.vm.ci.code.CallingConvention;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.RegisterAttributes;
import jdk.vm.ci.code.RegisterConfig;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.code.TargetDescription;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.Constant;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.Value;
import jdk.vm.ci.meta.ValueKind;
/**
* This class traverses the HIR instructions and generates LIR instructions from them.
*/
public abstract class LIRGenerator extends CoreProvidersDelegate implements LIRGeneratorTool {
    // Code alignment applied to loop headers; initialized in the constructor
    // from the LoopHeaderAlignment option.
    private final int loopHeaderAlignment;
    /** Debug options controlling LIR-generation printing and tracing. */
    public static class Options {
        // @formatter:off
        @Option(help = "Print HIR along side LIR as the latter is generated", type = OptionType.Debug)
        public static final OptionKey<Boolean> PrintIRWithLIR = new OptionKey<>(false);
        @Option(help = "The trace level for the LIR generator", type = OptionType.Debug)
        public static final OptionKey<Integer> TraceLIRGeneratorLevel = new OptionKey<>(0);
        // @formatter:on
    }
private final LIRKindTool lirKindTool;
private BasicBlock<?> currentBlock;
private LIRGenerationResult res;
protected final ArithmeticLIRGenerator arithmeticLIRGen;
protected final BarrierSetLIRGeneratorTool barrierSetLIRGen;
private final MoveFactory moveFactory;
private final boolean printIrWithLir;
private final int traceLIRGeneratorLevel;
@SuppressWarnings("this-escape")
public LIRGenerator(LIRKindTool lirKindTool, ArithmeticLIRGenerator arithmeticLIRGen, BarrierSetLIRGeneratorTool barrierSetLIRGen, MoveFactory moveFactory, CoreProviders providers,
LIRGenerationResult res) {
super(providers);
this.lirKindTool = lirKindTool;
this.arithmeticLIRGen = arithmeticLIRGen;
this.barrierSetLIRGen = barrierSetLIRGen;
this.res = res;
OptionValues options = res.getLIR().getOptions();
this.printIrWithLir = !TTY.isSuppressed() && Options.PrintIRWithLIR.getValue(options);
this.traceLIRGeneratorLevel = TTY.isSuppressed() ? 0 : Options.TraceLIRGeneratorLevel.getValue(options);
this.loopHeaderAlignment = LoopHeaderAlignment.getValue(options);
this.moveFactory = moveFactory;
arithmeticLIRGen.setLirGen(this);
}
@Override
public ArithmeticLIRGeneratorTool getArithmetic() {
return arithmeticLIRGen;
}
@Override
public BarrierSetLIRGeneratorTool getBarrierSet() {
return barrierSetLIRGen;
}
@Override
public MoveFactory getMoveFactory() {
return moveFactory;
}
private MoveFactory spillMoveFactory;
@Override
public MoveFactory getSpillMoveFactory() {
if (spillMoveFactory == null) {
boolean verify = false;
assert (verify = true) == true;
if (verify) {
spillMoveFactory = new VerifyingMoveFactory(moveFactory);
} else {
spillMoveFactory = moveFactory;
}
}
return spillMoveFactory;
}
@Override
public LIRKind getValueKind(JavaKind javaKind) {
return LIRKind.fromJavaKind(target().arch, javaKind);
}
@Override
public TargetDescription target() {
return getCodeCache().getTarget();
}
public LIRKindTool getLIRKindTool() {
return lirKindTool;
}
/**
* Hide {@link #nextVariable()} from other users.
*/
public abstract static class VariableProvider {
private int numVariables;
public int numVariables() {
return numVariables;
}
private int nextVariable() {
return numVariables++;
}
}
@Override
public Variable newVariable(ValueKind<?> valueKind) {
return new Variable(valueKind, ((VariableProvider) res.getLIR()).nextVariable());
}
@Override
public RegisterConfig getRegisterConfig() {
return res.getRegisterConfig();
}
public RegisterAttributes attributes(Register register) {
return getRegisterConfig().getAttributesMap().get(register.number);
}
@Override
public Variable emitMove(Value input) {
assert !LIRValueUtil.isVariable(input) : "Creating a copy of a variable via this method is not supported (and potentially a bug): " + input;
return emitMoveHelper(input.getValueKind(), input);
}
@Override
public Variable emitMove(ValueKind<?> dst, Value src) {
return emitMoveHelper(dst, src);
}
private Variable emitMoveHelper(ValueKind<?> dst, Value input) {
Variable result = newVariable(dst);
emitMove(result, input);
return result;
}
@Override
public void emitMove(AllocatableValue dst, Value src) {
append(moveFactory.createMove(dst, src));
}
@Override
public Variable emitReadRegister(Register register, ValueKind<?> kind) {
return emitMove(register.asValue(kind));
}
@Override
public void emitWriteRegister(Register dst, Value src, ValueKind<?> kind) {
emitMove(dst.asValue(kind), src);
}
@Override
public void emitMoveConstant(AllocatableValue dst, Constant src) {
append(moveFactory.createLoad(dst, src));
}
@Override
public boolean canInlineConstant(Constant constant) {
return moveFactory.canInlineConstant(constant);
}
@Override
public boolean mayEmbedConstantLoad(Constant constant) {
return moveFactory.mayEmbedConstantLoad(constant);
}
@Override
public Value emitConstant(LIRKind kind, Constant constant) {
if (moveFactory.canInlineConstant(constant)) {
return new ConstantValue(toRegisterKind(kind), constant);
} else {
return emitLoadConstant(toRegisterKind(kind), constant);
}
}
@Override
public Value emitJavaConstant(JavaConstant constant) {
return emitConstant(getValueKind(constant.getJavaKind()), constant);
}
@Override
public AllocatableValue emitLoadConstant(ValueKind<?> kind, Constant constant) {
Variable result = newVariable(kind);
emitMoveConstant(result, constant);
return result;
}
@Override
public AllocatableValue asAllocatable(Value value) {
if (isAllocatableValue(value)) {
return asAllocatableValue(value);
} else if (LIRValueUtil.isConstantValue(value)) {
return emitLoadConstant(value.getValueKind(), LIRValueUtil.asConstant(value));
} else {
return emitMove(value);
}
}
/**
* Determines if only oop maps are required for the code generated from the LIR.
*/
public boolean needOnlyOopMaps() {
return false;
}
/**
* Gets the ABI specific operand used to return a value of a given kind from a method.
*
* @param javaKind the kind of value being returned
* @param valueKind the backend type of the value being returned
* @return the operand representing the ABI defined location used return a value of kind
* {@code kind}
*/
public AllocatableValue resultOperandFor(JavaKind javaKind, ValueKind<?> valueKind) {
Register reg = getRegisterConfig().getReturnRegister(javaKind);
assert target().arch.canStoreValue(reg.getRegisterCategory(), valueKind.getPlatformKind()) : reg.getRegisterCategory() + " " + valueKind.getPlatformKind();
return reg.asValue(valueKind);
}
NodeSourcePosition currentPosition;
public void setSourcePosition(NodeSourcePosition position) {
currentPosition = position;
}
private static boolean verify(final LIRInstruction op) {
op.visitEachInput(LIRGenerator::allowed);
op.visitEachAlive(LIRGenerator::allowed);
op.visitEachState(LIRGenerator::allowed);
op.visitEachTemp(LIRGenerator::allowed);
op.visitEachOutput(LIRGenerator::allowed);
op.verify();
return true;
}
// @formatter:off
private static void allowed(Object op, Value val, LIRInstruction.OperandMode mode, EnumSet<LIRInstruction.OperandFlag> flags) {
Value value = LIRValueUtil.stripCast(val);
if ((LIRValueUtil.isVariable(value) && flags.contains(LIRInstruction.OperandFlag.REG)) ||
(isRegister(value) && flags.contains(LIRInstruction.OperandFlag.REG)) ||
(LIRValueUtil.isStackSlotValue(value) && flags.contains(LIRInstruction.OperandFlag.STACK)) ||
(LIRValueUtil.isConstantValue(value) && flags.contains(LIRInstruction.OperandFlag.CONST) && mode != LIRInstruction.OperandMode.DEF) ||
(isIllegal(value) && flags.contains(LIRInstruction.OperandFlag.ILLEGAL))) {
return;
}
throw new GraalError("Invalid LIR%n Instruction: %s%n Mode: %s%n Flags: %s%n Unexpected value: %s %s",
op, mode, flags, value.getClass().getSimpleName(), value);
}
// @formatter:on
@Override
public <I extends LIRInstruction> I append(I op) {
LIR lir = res.getLIR();
if (printIrWithLir) {
TTY.println(op.toStringWithIdPrefix());
TTY.println();
}
assert verify(op);
ArrayList<LIRInstruction> lirForBlock = lir.getLIRforBlock(getCurrentBlock());
op.setPosition(currentPosition);
assert !lirForBlock.contains(op) : "added " + op + " twice";
lirForBlock.add(op);
return op;
}
public boolean hasBlockEnd(BasicBlock<?> block) {
ArrayList<LIRInstruction> ops = getResult().getLIR().getLIRforBlock(block);
if (ops.size() == 0) {
return false;
}
return ops.get(ops.size() - 1) instanceof StandardOp.BlockEndOp;
}
private final class BlockScopeImpl extends BlockScope {
private BlockScopeImpl(BasicBlock<?> block) {
currentBlock = block;
}
private void doBlockStart() {
if (printIrWithLir) {
TTY.print(currentBlock.toString());
}
// set up the list of LIR instructions
assert res.getLIR().getLIRforBlock(currentBlock) == null : "LIR list already computed for this block";
res.getLIR().setLIRforBlock(currentBlock, new ArrayList<>());
append(new StandardOp.LabelOp(new Label(currentBlock.getId()), currentBlock.isAligned() ? loopHeaderAlignment : 0));
if (traceLIRGeneratorLevel >= 1) {
TTY.println("BEGIN Generating LIR for block B" + currentBlock.getId());
}
}
private void doBlockEnd() {
if (traceLIRGeneratorLevel >= 1) {
TTY.println("END Generating LIR for block B" + currentBlock.getId());
}
if (printIrWithLir) {
TTY.println();
}
currentBlock = null;
}
@Override
public BasicBlock<?> getCurrentBlock() {
return currentBlock;
}
@Override
public void close() {
doBlockEnd();
}
}
public final BlockScope getBlockScope(BasicBlock<?> block) {
BlockScopeImpl blockScope = new BlockScopeImpl(block);
blockScope.doBlockStart();
return blockScope;
}
private final class MatchScope implements DebugCloseable {
private MatchScope(BasicBlock<?> block) {
currentBlock = block;
}
@Override
public void close() {
currentBlock = null;
}
}
public final DebugCloseable getMatchScope(BasicBlock<?> block) {
MatchScope matchScope = new MatchScope(block);
return matchScope;
}
public void emitIncomingValues(Value[] params) {
((StandardOp.LabelOp) res.getLIR().getLIRforBlock(getCurrentBlock()).get(0)).setIncomingValues(params);
}
@Override
public abstract void emitJump(LabelRef label);
public abstract void emitCompareBranch(PlatformKind cmpKind, Value left, Value right, Condition cond, boolean unorderedIsTrue, LabelRef trueDestination, LabelRef falseDestination,
double trueDestinationProbability);
public abstract void emitOverflowCheckBranch(LabelRef overflow, LabelRef noOverflow, LIRKind cmpKind, double overflowProbability);
public abstract void emitIntegerTestBranch(Value left, Value right, LabelRef trueDestination, LabelRef falseDestination, double trueSuccessorProbability);
public abstract void emitOpMaskTestBranch(Value left, boolean negateLeft, Value right, LabelRef trueDestination, LabelRef falseDestination, double trueSuccessorProbability);
public abstract void emitOpMaskOrTestBranch(Value left, Value right, boolean allZeros, LabelRef trueDestination, LabelRef falseDestination, double trueSuccessorProbability);
@Override
public abstract Variable emitConditionalMove(PlatformKind cmpKind, Value leftVal, Value right, Condition cond, boolean unorderedIsTrue, Value trueValue, Value falseValue);
@Override
public abstract Variable emitIntegerTestMove(Value leftVal, Value right, Value trueValue, Value falseValue);
public abstract Variable emitOpMaskTestMove(Value leftVal, boolean negateLeft, Value right, Value trueValue, Value falseValue);
public abstract Variable emitOpMaskOrTestMove(Value leftVal, Value right, boolean allZeros, Value trueValue, Value falseValue);
/** Loads the target address for indirect {@linkplain #emitForeignCall foreign calls}. */
protected Value emitIndirectForeignCallAddress(@SuppressWarnings("unused") ForeignCallLinkage linkage) {
return null;
}
/**
* Emits the single call operation at the heart of generating LIR for a
* {@linkplain #emitForeignCall foreign call}.
*/
protected abstract void emitForeignCallOp(ForeignCallLinkage linkage, Value targetAddress, Value result, Value[] arguments, Value[] temps, LIRFrameState info);
@Override
public Variable emitForeignCall(ForeignCallLinkage linkage, LIRFrameState frameState, Value... args) {
LIRFrameState state = null;
if (linkage.needsDebugInfo()) {
if (frameState != null) {
state = frameState;
} else {
assert needOnlyOopMaps();
state = new LIRFrameState(null, null, null, false);
}
}
Value targetAddress = emitIndirectForeignCallAddress(linkage);
// move the arguments into the correct location
CallingConvention linkageCc = linkage.getOutgoingCallingConvention();
res.getFrameMapBuilder().callsMethod(linkageCc);
assert linkageCc.getArgumentCount() == args.length : "argument count mismatch";
Value[] argLocations = new Value[args.length];
for (int i = 0; i < args.length; i++) {
Value arg = args[i];
AllocatableValue loc = linkageCc.getArgument(i);
emitMove(loc, arg);
argLocations[i] = loc;
}
res.setForeignCall(true);
emitForeignCallOp(linkage, targetAddress, linkageCc.getReturn(), argLocations, linkage.getTemporaries(), state);
if (isLegal(linkageCc.getReturn())) {
return emitMove(linkageCc.getReturn());
} else {
return null;
}
}
/**
* If we decide that the switch strategy is better in terms of throughput, do it, this means we
* consider an indirect jump to be JUMP_TABLE_THRESHOLD times as expensive as a compare and
* branch.
*/
private static final double JUMP_TABLE_THRESHOLD = 3;
/**
* Try to come up with a preferable way to execute a TableSwitch.
*
* <ol>
* <li>If a series of compare and branches is deemed better throughput-wise, then it should be a
* win both in terms of time and space. As a result, it would be chosen. Note that this is not
* absolutely true, though, as a typical direct jump table entry is 4 bytes while a compare and
* branch is often 8 bytes on A64 and 10 bytes on x64.</li>
* <li>Else if the table is dense enough or it is not too large, then we generate a direct jump
* table.</li>
* <li>Else we try to find a subset of keys that contains the majority of the jump targets by
* frequency. If there is such a subset that is dense enough or not too large, we generate a
* jump table for that subset, the remaining targets including the default one are reached with
* a series of compare and branches in a slowpath stub.</li>
* <li>Else we try to generate a hashed jump table, if the hashed jump table is dense enough
* then we choose it. Note that a hashed jump table is larger than a direct one so we do not
* need to consider its size now.</li>
* <li>Else we fall back to a series of compare and branches.</li>
* </ol>
*/
public void emitStrategySwitch(JavaConstant[] keyConstants, double[] keyProbabilities, LabelRef[] keyTargets, LabelRef defaultTarget, AllocatableValue value,
boolean inputMayBeOutOfRange, boolean mayEmitThreadedCode) {
SwitchStrategy strategy = SwitchStrategy.getBestStrategy(keyProbabilities, keyConstants, keyTargets);
if (strategy.getAverageEffort() < JUMP_TABLE_THRESHOLD) {
emitStrategySwitch(strategy, value, keyTargets, defaultTarget);
return;
}
// If the density of the jump table would be larger than this value then we will emit one
double minDensity = 1 / Math.sqrt(strategy.getAverageEffort());
Subrange subrangeForDirectJump = findSubrangeForDirectJumpTable(keyConstants, keyProbabilities, minDensity, mayEmitThreadedCode);
if (subrangeForDirectJump != null) {
int loIdx = subrangeForDirectJump.loIdx;
// Micro-optimization, try to extend minValue to 0 so that we may avoid the subtraction
int minValue = keyConstants[loIdx].asInt();
if (minValue > 0) {
int extendedLoIdx = 0;
for (; extendedLoIdx <= loIdx; extendedLoIdx++) {
if (keyConstants[extendedLoIdx].asInt() >= 0) {
break;
}
}
if (mayEmitThreadedCode || directJumpForSubrange(keyConstants, minDensity, 0, extendedLoIdx, subrangeForDirectJump.hiIdx)) {
loIdx = extendedLoIdx;
minValue = 0;
}
}
emitDirectJumpTableHelper(keyConstants, keyProbabilities, keyTargets, defaultTarget, value, minValue, loIdx, subrangeForDirectJump.hiIdx, inputMayBeOutOfRange, mayEmitThreadedCode);
return;
}
// Try a hashed jump table
Optional<IntHasher> hasher = hasherFor(keyConstants);
if (hasher.isEmpty()) {
emitStrategySwitch(strategy, value, keyTargets, defaultTarget);
return;
}
IntHasher h = hasher.get();
double hashDensity = (double) keyConstants.length / Integer.toUnsignedLong(h.cardinality);
// No need to check for size since if this table is too big for a guaranteed direct jump
// then it is surely too big for a guaranteed hashed jump
if (hashDensity >= minDensity) {
emitHashedJumpTableHelper(keyConstants, keyTargets, defaultTarget, value, h);
} else {
emitStrategySwitch(strategy, value, keyTargets, defaultTarget);
}
}
/**
* A subrange of the keyConstants array.
*/
private record Subrange(int loIdx, int hiIdx) {
}
/**
* The threshold at which a subrange is considered a common path and we will decide if a jump
* table for that subrange is acceptable.
*/
private static final double MAJORITY_THRESHOLD = 0.99;
/**
* The general approach is to check the most frequent targets to see if we can emit a direct
* jump table with them, the default target will now check the other remaining targets. In the
* best case, our subrange will cover the whole range and there is no remaining target.
*/
private static Subrange findSubrangeForDirectJumpTable(JavaConstant[] keyConstants, double[] keyProbabilities, double minDensity, boolean mayEmitThreadedCode) {
if (mayEmitThreadedCode || directJumpForSubrange(keyConstants, minDensity, keyConstants[0].asInt(), 0, keyConstants.length - 1)) {
// If all cases can be fit into a jump table then ignore the probability
return new Subrange(0, keyConstants.length - 1);
}
// The probability of all key targets starting from loIdx
double loRangeProbability = 0;
// Since loIdx starts at 0, this is the combined probability of all key targets
for (int i = 0; i < keyConstants.length; i++) {
loRangeProbability += keyProbabilities[i];
}
// This loop nest simply traverses all the subranges
for (int loIdx = 0; loIdx < keyConstants.length; loIdx++) {
double subrangeProbability = loRangeProbability;
if (subrangeProbability < MAJORITY_THRESHOLD) {
break;
}
// Walk backward so we can have the largest subrange possible
for (int hiIdx = keyConstants.length - 1; hiIdx >= loIdx; hiIdx--) {
if (subrangeProbability < MAJORITY_THRESHOLD) {
break;
}
if (directJumpForSubrange(keyConstants, minDensity, keyConstants[loIdx].asInt(), loIdx, hiIdx)) {
return new Subrange(loIdx, hiIdx);
}
subrangeProbability -= keyProbabilities[hiIdx];
}
loRangeProbability -= keyProbabilities[loIdx];
}
// No preferable subrange found
return null;
}
private void emitDirectJumpTableHelper(JavaConstant[] keyConstants, double[] keyProbs, LabelRef[] keyTargets, LabelRef defaultTarget, AllocatableValue value, int minValue, int loIdx, int hiIdx,
boolean inputMayBeOutOfRange, boolean mayEmitThreadedCode) {
int maxValue = keyConstants[hiIdx].asInt();
// This cannot overflow because we have ensured that above
int subrangeValueRange = maxValue - minValue + 1;
GraalError.guarantee(Integer.compareUnsigned(subrangeValueRange, Integer.MAX_VALUE) <= 0, "too large jump table: %s - %d - %d", Arrays.toString(keyConstants), minValue, maxValue);
LabelRef[] targets = new LabelRef[subrangeValueRange];
Arrays.fill(targets, defaultTarget);
for (int i = loIdx; i <= hiIdx; i++) {
targets[keyConstants[i].asInt() - minValue] = keyTargets[i];
}
// Use a switch strategy to process the remaining targets
int remainingKeyCount = keyConstants.length - (hiIdx - loIdx + 1);
SwitchStrategy remainingStrategy = null;
LabelRef[] remainingKeyTargets = null;
if (remainingKeyCount > 0) {
JavaConstant[] remainingKeyConstants = new JavaConstant[remainingKeyCount];
double[] remainingKeyProbabilities = new double[remainingKeyCount];
remainingKeyTargets = new LabelRef[remainingKeyCount];
int i = 0;
for (int j = 0; j < loIdx; j++) {
remainingKeyConstants[i] = keyConstants[j];
remainingKeyProbabilities[i] = keyProbs[j];
remainingKeyTargets[i] = keyTargets[j];
i++;
}
for (int j = hiIdx + 1; j < keyConstants.length; j++) {
remainingKeyConstants[i] = keyConstants[j];
remainingKeyProbabilities[i] = keyProbs[j];
remainingKeyTargets[i] = keyTargets[j];
i++;
}
remainingStrategy = SwitchStrategy.getBestStrategy(remainingKeyProbabilities, remainingKeyConstants, remainingKeyTargets);
}
if (AlignJumpTableEntry.getValue(getResult().getLIR().getOptions())) {
for (LabelRef jumpTableEntry : targets) {
jumpTableEntry.getTargetBlock().setAlign(true);
}
}
emitRangeTableSwitch(minValue, defaultTarget, targets, remainingStrategy, remainingKeyTargets, value, inputMayBeOutOfRange, mayEmitThreadedCode);
}
private void emitHashedJumpTableHelper(JavaConstant[] keyConstants, LabelRef[] keyTargets, LabelRef defaultTarget, AllocatableValue value, IntHasher h) {
LabelRef[] targets = new LabelRef[h.cardinality];
JavaConstant[] keys = new JavaConstant[h.cardinality];
for (int i = 0; i < h.cardinality; i++) {
keys[i] = JavaConstant.INT_0;
targets[i] = defaultTarget;
}
for (int i = 0; i < keyConstants.length; i++) {
int idx = h.hash(keyConstants[i].asInt());
keys[idx] = keyConstants[i];
targets[idx] = keyTargets[i];
}
emitHashTableSwitch(h, keys, defaultTarget, targets, value);
}
/**
* If the size of the jump table is smaller than this value then we emit one regardless the
* table density.
*/
private static final int MAX_DIRECT_SIZE = 128;
/**
* Each jump table entry is 4 bytes on AArch64 or 8 bytes on AMD64, so if the jump table is too
* sparse we may waste a lot of space. As a result, we emit a table if it is not too large, OR
* if it is dense enough.
*/
private static boolean directJumpForSubrange(JavaConstant[] keyConstants, double minDensity, int minValue, int loIdx, int hiIdx) {
// Subtraction of 2 sorted signed values will not overflow the unsigned range
// Must not add 1 here because we can overflow -1 -> 0
int valueRangeMinus1 = keyConstants[hiIdx].asInt() - minValue;
if (Integer.compareUnsigned(valueRangeMinus1, Integer.MAX_VALUE) >= 0) {
// Too large range, reject to avoid weird edge cases
return false;
}
double density = (double) (hiIdx - loIdx + 1) / (Integer.toUnsignedLong(valueRangeMinus1) + 1);
return Integer.compareUnsigned(valueRangeMinus1, MAX_DIRECT_SIZE) < 0 || density >= minDensity;
}
public abstract void emitStrategySwitch(SwitchStrategy strategy, AllocatableValue key, LabelRef[] keyTargets, LabelRef defaultTarget);
/**
* Emit a direct jump table with {@code targets.length} consecutive keys starting at {@code
* lowKey}. If {@code remainingStrategy == null}, all other values of {@code key} will result in
* the control flow being transferred to {@code defaultTarget}. Otherwise, {@code
* remainingStrategy} will decide the jump destination among {@code remainingTargets} and {@code
* defaultTarget} when the value of {@code key} is not in the jump table.
*/
protected abstract void emitRangeTableSwitch(int lowKey, LabelRef defaultTarget, LabelRef[] targets, SwitchStrategy remainingStrategy, LabelRef[] remainingTargets, AllocatableValue key,
boolean inputMayBeOutOfRange, boolean mayEmitThreadedCode);
protected abstract void emitHashTableSwitch(JavaConstant[] keys, LabelRef defaultTarget, LabelRef[] targets, AllocatableValue value, Value hash);
private static Optional<IntHasher> hasherFor(JavaConstant[] keyConstants) {
int[] keys = new int[keyConstants.length];
for (int i = 0; i < keyConstants.length; i++) {
keys[i] = keyConstants[i].asInt();
}
return IntHasher.forKeys(keys);
}
private void emitHashTableSwitch(IntHasher hasher, JavaConstant[] keys, LabelRef defaultTarget, LabelRef[] targets, AllocatableValue value) {
Value hash = value;
if (hasher.factor > 1) {
Value factor = emitJavaConstant(JavaConstant.forShort(hasher.factor));
hash = arithmeticLIRGen.emitMul(hash, factor, false);
}
if (hasher.shift > 0) {
Value shift = emitJavaConstant(JavaConstant.forByte(hasher.shift));
hash = arithmeticLIRGen.emitShr(hash, shift);
}
Value cardinalityAnd = emitJavaConstant(JavaConstant.forInt(hasher.cardinality - 1));
hash = arithmeticLIRGen.emitAnd(hash, cardinalityAnd);
emitHashTableSwitch(keys, defaultTarget, targets, value, hash);
}
/**
* Called just before register allocation is performed on the LIR owned by this generator.
* Overriding implementations of this method must call the overridden method.
*/
public void beforeRegisterAllocation() {
}
/**
* Gets a garbage value for a given kind.
*/
protected abstract JavaConstant zapValueForKind(PlatformKind kind);
@Override
public LIRKind getLIRKind(Stamp stamp) {
return stamp.getLIRKind(lirKindTool);
}
@Override
public BasicBlock<?> getCurrentBlock() {
return currentBlock;
}
@Override
public LIRGenerationResult getResult() {
return res;
}
@Override
public void emitBlackhole(Value operand) {
append(new StandardOp.BlackholeOp(operand));
}
@Override
public LIRInstruction createBenchmarkCounter(String name, String group, Value increment) {
throw GraalError.unimplementedOverride(); // ExcludeFromJacocoGeneratedReport
}
@Override
public LIRInstruction createMultiBenchmarkCounter(String[] names, String[] groups, Value[] increments) {
throw GraalError.unimplementedOverride(); // ExcludeFromJacocoGeneratedReport
}
@Override
public abstract LIRInstruction createZapRegisters(Register[] zappedRegisters, JavaConstant[] zapValues);
@Override
public LIRInstruction createZapRegisters() {
Register[] zappedRegisters = getResult().getFrameMap().getRegisterConfig().getAllocatableRegisters().toArray(Register[]::new);
return createZapRegisters(zappedRegisters);
}
@Override
public LIRInstruction createZapRegisters(Register[] zappedRegisters) {
JavaConstant[] zapValues = new JavaConstant[zappedRegisters.length];
for (int i = 0; i < zappedRegisters.length; i++) {
PlatformKind kind = target().arch.getLargestStorableKind(zappedRegisters[i].getRegisterCategory());
zapValues[i] = zapValueForKind(kind);
}
return createZapRegisters(zappedRegisters, zapValues);
}
@Override
public abstract LIRInstruction createZapArgumentSpace(StackSlot[] zappedStack, JavaConstant[] zapValues);
@Override
public LIRInstruction zapArgumentSpace() {
List<StackSlot> slots = null;
CallingConvention cc = res.getCallingConvention();
for (AllocatableValue arg : cc.getArguments()) {
if (isStackSlot(arg)) {
if (slots == null) {
slots = new ArrayList<>();
}
slots.add((StackSlot) arg);
} else {
assert !LIRValueUtil.isVirtualStackSlot(arg);
}
}
if (slots != null && isStackSlot(cc.getReturn())) {
// Some calling conventions pass their return value through the stack so make sure not
// to kill the return value.
slots.remove(asStackSlot(cc.getReturn()));
}
if (slots == null || slots.size() == 0) {
return null;
}
StackSlot[] zappedStack = slots.toArray(new StackSlot[slots.size()]);
JavaConstant[] zapValues = new JavaConstant[zappedStack.length];
for (int i = 0; i < zappedStack.length; i++) {
PlatformKind kind = zappedStack[i].getPlatformKind();
zapValues[i] = zapValueForKind(kind);
}
return createZapArgumentSpace(zappedStack, zapValues);
}
/**
* Returns the offset of the array length word in an array object's header.
*/
public abstract int getArrayLengthOffset();
/**
* Returns the offset of the first array element.
*/
public int getArrayBaseOffset(JavaKind elementKind) {
return getMetaAccess().getArrayBaseOffset(elementKind);
}
}
|
openjdk/jdk8 | 35,097 | jaxp/src/com/sun/org/apache/xpath/internal/NodeSetDTM.java | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: NodeSetDTM.java,v 1.2.4.2 2005/09/14 20:30:06 jeffsuttor Exp $
*/
package com.sun.org.apache.xpath.internal;
import com.sun.org.apache.xalan.internal.res.XSLMessages;
import com.sun.org.apache.xml.internal.dtm.DTM;
import com.sun.org.apache.xml.internal.dtm.DTMFilter;
import com.sun.org.apache.xml.internal.dtm.DTMIterator;
import com.sun.org.apache.xml.internal.dtm.DTMManager;
import com.sun.org.apache.xml.internal.utils.NodeVector;
import com.sun.org.apache.xpath.internal.res.XPATHErrorResources;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.traversal.NodeIterator;
/**
* <p>The NodeSetDTM class can act as either a NodeVector,
* NodeList, or NodeIterator. However, in order for it to
* act as a NodeVector or NodeList, it's required that
* setShouldCacheNodes(true) be called before the first
* nextNode() is called, in order that nodes can be added
* as they are fetched. Derived classes that implement iterators
* must override runTo(int index), in order that they may
* run the iteration to the given index. </p>
*
* <p>Note that we directly implement the DOM's NodeIterator
* interface. We do not emulate all the behavior of the
* standard NodeIterator. In particular, we do not guarantee
* to present a "live view" of the document ... but in XSLT,
* the source document should never be mutated, so this should
* never be an issue.</p>
*
* <p>Thought: Should NodeSetDTM really implement NodeList and NodeIterator,
* or should there be specific subclasses of it which do so? The
* advantage of doing it all here is that all NodeSetDTMs will respond
* to the same calls; the disadvantage is that some of them may return
* less-than-enlightening results when you do so.</p>
* @xsl.usage advanced
*/
public class NodeSetDTM extends NodeVector
implements /* NodeList, NodeIterator, */ DTMIterator,
Cloneable
{
// Serialization version marker; must stay stable across releases for compatibility.
static final long serialVersionUID = 7686480133331317070L;
/**
 * Constructs an empty node set backed by the supplied DTM manager.
 *
 * @param dtmManager manager used to resolve DTM node handles for this set
 */
public NodeSetDTM(DTMManager dtmManager)
{
  super();
  this.m_manager = dtmManager;
}
/**
 * Constructs an empty node set whose backing vector allocates storage in
 * chunks of the given block size.
 *
 * @param blocksize Size of blocks to allocate
 * @param dummy pass zero for right now...
 * @param dtmManager manager used to resolve DTM node handles for this set
 */
public NodeSetDTM(int blocksize, int dummy, DTMManager dtmManager)
{
  super(blocksize);
  this.m_manager = dtmManager;
}
// %TBD%
// /**
// * Create a NodeSetDTM, and copy the members of the
// * given nodelist into it.
// *
// * @param nodelist List of Nodes to be made members of the new set.
// */
// public NodeSetDTM(NodeList nodelist)
// {
//
// super();
//
// addNodes(nodelist);
// }
/**
 * Constructs a node set containing a copy of the members of another
 * NodeSetDTM. The DTM manager and iteration root are taken from the
 * source set before its nodes are copied in.
 *
 * @param nodelist Set of Nodes to be made members of the new set.
 */
public NodeSetDTM(NodeSetDTM nodelist)
{
  super();
  this.m_manager = nodelist.getDTMManager();
  this.m_root = nodelist.getRoot();
  this.addNodes((DTMIterator) nodelist);
}
/**
 * Constructs a node set containing the nodes yielded by the given
 * DTMIterator. The DTM manager and iteration root are taken from the
 * iterator before its nodes are copied in.
 *
 * @param ni Iterator which yields Nodes to be made members of the new set.
 */
public NodeSetDTM(DTMIterator ni)
{
  super();
  this.m_manager = ni.getDTMManager();
  this.m_root = ni.getRoot();
  this.addNodes(ni);
}
/**
 * Constructs a node set from a DOM NodeIterator, converting each DOM node
 * to a DTM handle and inserting it in document order.
 *
 * @param iterator Iterator which yields Nodes to be made members of the new set.
 * @param xctxt Runtime XPath context used to map DOM nodes to DTM handles.
 */
public NodeSetDTM(NodeIterator iterator, XPathContext xctxt)
{
  super();
  this.m_manager = xctxt.getDTMManager();
  for (Node node = iterator.nextNode(); node != null; node = iterator.nextNode())
  {
    addNodeInDocOrder(xctxt.getDTMHandleFromNode(node), xctxt);
  }
}
/**
 * Create a NodeSetDTM, and copy the members of the
 * given NodeList into it, preserving the list's order and any duplicates.
 *
 * @param nodeList List of Nodes to be made members of the new set.
 * @param xctxt Runtime XPath context used to map DOM nodes to DTM handles.
 */
public NodeSetDTM(NodeList nodeList, XPathContext xctxt)
{
  super();
  m_manager = xctxt.getDTMManager();
  int n = nodeList.getLength();
  for (int i = 0; i < n; i++)
  {
    Node node = nodeList.item(i);
    int handle = xctxt.getDTMHandleFromNode(node);
    // Do not reorder or strip duplicate nodes from the given DOM nodelist
    addNode(handle); // addNodeInDocOrder(handle, xctxt);
  }
}
/**
 * Constructs a node set whose only member is the given node handle.
 *
 * @param node Single node to be added to the new set.
 * @param dtmManager manager used to resolve DTM node handles for this set
 */
public NodeSetDTM(int node, DTMManager dtmManager)
{
  super();
  this.m_manager = dtmManager;
  this.addNode(node);
}
/**
 * Set the environment in which this iterator operates, which should provide:
 * a node (the context node... same value as "root" defined below)
 * a pair of non-zero positive integers (the context position and the context size)
 * a set of variable bindings
 * a function library
 * the set of namespace declarations in scope for the expression.
 *
 * <p>At this time the exact implementation of this environment is application
 * dependent. Probably a proper interface will be created fairly soon.</p>
 *
 * <p>This implementation intentionally ignores the environment; the set's
 * contents do not depend on it.</p>
 *
 * @param environment The environment object.
 */
public void setEnvironment(Object environment)
{
  // no-op
}
/**
 * Returns the iteration root. When no explicit root was recorded, the first
 * node in the set (if any) stands in for it; otherwise DTM.NULL.
 *
 * @return The root node of the Iterator, as specified when it was created.
 *         For non-Iterator NodeSetDTMs, this will be null.
 */
public int getRoot()
{
  if (DTM.NULL != m_root)
  {
    return m_root;
  }
  return (size() > 0) ? item(0) : DTM.NULL;
}
/**
* Initialize the context values for this expression
* after it is cloned.
*
* @param context The XPath runtime context for this
* transformation.
*/
public void setRoot(int context, Object environment)
{
  // Intentional no-op in this implementation. (-sb left it unimplemented.)
}
/**
* Clone this NodeSetDTM.
* At this time, we only expect this to be used with LocPathIterators;
* it may not work with other kinds of NodeSetDTMs.
*
* @return a new NodeSetDTM of the same type, having the same state...
* though unless overridden in the subclasses, it may not copy all
* the state information.
*
* @throws CloneNotSupportedException if this subclass of NodeSetDTM
* does not support the clone() operation.
*/
public Object clone() throws CloneNotSupportedException
{
  // Shallow copy via the superclass; subclasses may deepen this if needed.
  return (NodeSetDTM) super.clone();
}
/**
* Get a cloned Iterator, and reset its state to the beginning of the
* iteration.
*
* @return a new NodeSetDTM of the same type, having the same state...
* except that the reset() operation has been called.
*
* @throws CloneNotSupportedException if this subclass of NodeSetDTM
* does not support the clone() operation.
*/
public DTMIterator cloneWithReset() throws CloneNotSupportedException
{
  // Clone first, then rewind only the copy so this iterator's own
  // position is left untouched.
  NodeSetDTM copy = (NodeSetDTM) clone();
  copy.reset();
  return copy;
}
/**
* Reset the iterator. May have no effect on non-iterator Nodesets.
*/
public void reset()
{
  // Rewind the cursor to the "fresh" position; the set's contents are untouched.
  m_next = 0;
}
/**
* This attribute determines which node types are presented via the
* iterator. The available set of constants is defined in the
* <code>DTMFilter</code> interface. For NodeSetDTMs, the mask has been
* hardcoded to show all nodes except EntityReference nodes, which have
* no equivalent in the XPath data model.
*
* @return integer used as a bit-array, containing flags defined in
* the DOM's DTMFilter class. The value will be
* <code>SHOW_ALL & ~SHOW_ENTITY_REFERENCE</code>, meaning that
* only entity references are suppressed.
*/
public int getWhatToShow()
{
  // Everything except entity-reference nodes, which have no XPath equivalent.
  return DTMFilter.SHOW_ALL & ~DTMFilter.SHOW_ENTITY_REFERENCE;
}
/**
* The filter object used to screen nodes. Filters are applied to
* further reduce (and restructure) the DTMIterator's view of the
* document. In our case, we will be using hardcoded filters built
* into our iterators... but getFilter() is part of the DOM's
* DTMIterator interface, so we have to support it.
*
* @return null, which is slightly misleading. True, there is no
* user-written filter object, but in fact we are doing some very
* sophisticated custom filtering. A DOM purist might suggest
* returning a placeholder object just to indicate that this is
* not going to return all nodes selected by whatToShow.
*/
public DTMFilter getFilter()
{
  // No user-supplied filter object; filtering is hardcoded (see getWhatToShow()).
  return null;
}
/**
 * The value of this flag determines whether the children of entity
 * reference nodes are visible to the iterator. If false, they will be
 * skipped over.
 * <br> To produce a view of the document that has entity references
 * expanded and does not expose the entity reference node itself, use the
 * whatToShow flags to hide the entity reference node and set
 * expandEntityReferences to true when creating the iterator. To produce
 * a view of the document that has entity reference nodes but no entity
 * expansion, use the whatToShow flags to show the entity reference node
 * and set expandEntityReferences to false.
 *
 * @return true for all iterators based on NodeSetDTM, meaning that the
 * contents of EntityReference nodes may be returned (though whatToShow
 * says that the EntityReferences themselves are not shown.)
 */
public boolean getExpandEntityReferences()
{
  return true;
}
/**
* Get an instance of a DTM that "owns" a node handle. Since a node
* iterator may be passed without a DTMManager, this allows the
* caller to easily get the DTM using just the iterator.
*
* @param nodeHandle the nodeHandle.
*
* @return a non-null DTM reference.
*/
public DTM getDTM(int nodeHandle)
{
  // Resolve the handle through the manager captured at construction time.
  return m_manager.getDTM(nodeHandle);
}
/** The DTMManager captured at construction; resolves node handles to DTMs. */
DTMManager m_manager;
/**
* Get an instance of the DTMManager. Since a node
* iterator may be passed without a DTMManager, this allows the
* caller to easily get the DTMManager using just the iterator.
*
* @return a non-null DTMManager reference.
*/
public DTMManager getDTMManager()
{
  // Manager captured by the constructor.
  return m_manager;
}
/**
* Returns the next node in the set and advances the position of the
* iterator in the set. After a DTMIterator is created, the first call
* to nextNode() returns the first node in the set.
* @return The next <code>Node</code> in the set being iterated over, or
* <code>DTM.NULL</code> if there are no more members in that set.
* @throws DOMException
* INVALID_STATE_ERR: Raised if this method is called after the
* <code>detach</code> method was invoked.
*/
public int nextNode()
{
  // Past the end: report exhaustion.
  if (m_next >= this.size())
    return DTM.NULL;
  // Fetch before advancing: elementAt() internally calls runTo(m_next),
  // which inspects (and may rewrite) the cursor.
  int current = this.elementAt(m_next);
  m_next++;
  return current;
}
/**
* Returns the previous node in the set and moves the position of the
* iterator backwards in the set.
* @return The previous <code>Node</code> in the set being iterated over,
* or<code>DTM.NULL</code> if there are no more members in that set.
* @throws DOMException
* INVALID_STATE_ERR: Raised if this method is called after the
* <code>detach</code> method was invoked.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a cached type, and hence doesn't know what the previous node was.
*/
/**
 * Returns the previous node in the set and moves the position of the
 * iterator backwards in the set.
 *
 * @return The previous node in the set being iterated over, or
 * DTM.NULL if the cursor is already at the beginning.
 * @throws RuntimeException thrown if this NodeSetDTM is not of
 * a cached type, and hence doesn't know what the previous node was.
 */
public int previousNode()
{
  if (!m_cacheNodes)
    throw new RuntimeException(
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_ITERATE, null)); //"This NodeSetDTM can not iterate to a previous node!");
  // Fix: the guard used to be (m_next - 1) > 0, an off-by-one that made the
  // first element unreachable — after a single nextNode() (m_next == 1) this
  // method returned DTM.NULL instead of stepping back to element 0.
  if (m_next > 0)
  {
    m_next--;
    return this.elementAt(m_next);
  }
  else
    return DTM.NULL;
}
/**
* Detaches the iterator from the set which it iterated over, releasing
* any computational resources and placing the iterator in the INVALID
* state. After<code>detach</code> has been invoked, calls to
* <code>nextNode</code> or<code>previousNode</code> will raise the
* exception INVALID_STATE_ERR.
* <p>
* This operation is a no-op in NodeSetDTM, and will not cause
* INVALID_STATE_ERR to be raised by later operations.
* </p>
*/
public void detach(){}
/**
* Specify if it's OK for detach to release the iterator for reuse.
*
* @param allowRelease true if it is OK for detach to release this iterator
* for pooling.
*/
public void allowDetachToRelease(boolean allowRelease)
{
  // no action for right now; this implementation does not pool iterators.
}
/**
* Tells if this NodeSetDTM is "fresh", in other words, if
* the first nextNode() that is called will return the
* first node in the set.
*
* @return true if nextNode() would return the first node in the set,
* false if it would return a later one.
*/
public boolean isFresh()
{
  // Fresh means the cursor has never advanced (or was reset to 0).
  return (m_next == 0);
}
/**
* If an index is requested, NodeSetDTM will call this method
* to run the iterator to the index. By default this sets
* m_next to the index. If the index argument is -1, this
* signals that the iterator should be run to the end.
*
* @param index Position to advance (or retreat) to, with
* 0 requesting the reset ("fresh") position and -1 (or indeed
* any out-of-bounds value) requesting the final position.
* @throws RuntimeException thrown if this NodeSetDTM is not
* one of the types which supports indexing/counting.
*/
public void runTo(int index)
{
  if (!m_cacheNodes)
    throw new RuntimeException(
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_INDEX, null)); //"This NodeSetDTM can not do indexing or counting functions!");
  // NOTE(review): the guard tests m_next (the current cursor), not index,
  // against m_firstFree; a non-negative out-of-range index is therefore stored
  // verbatim whenever the cursor is not already at the end. This looks like it
  // was meant to be (index < m_firstFree), but elementAt()/nextNode() call
  // through here and depend on the current behavior — confirm before changing.
  if ((index >= 0) && (m_next < m_firstFree))
    m_next = index;
  else
    m_next = m_firstFree - 1;
}
/**
* Returns the <code>index</code>th item in the collection. If
* <code>index</code> is greater than or equal to the number of nodes in
* the list, this returns <code>null</code>.
*
* TODO: What happens if index is out of range?
*
* @param index Index into the collection.
* @return The node at the <code>index</code>th position in the
* <code>NodeList</code>, or <code>null</code> if that is not a valid
* index.
*/
public int item(int index)
{
  // Position the cursor at 'index' first, then fetch the element.
  runTo(index);
  // NOTE(review): despite the javadoc, an out-of-range index is not mapped to
  // null here; elementAt() surfaces the underlying vector's behavior instead.
  return this.elementAt(index);
}
/**
* The number of nodes in the list. The range of valid child node indices is
* 0 to <code>length-1</code> inclusive. Note that this operation requires
* finding all the matching nodes, which may defeat attempts to defer
* that work.
*
* @return integer indicating how many nodes are represented by this list.
*/
public int getLength()
{
  runTo(-1); // force the iteration to complete so size() is final
  return this.size();
}
/**
* Add a node to the NodeSetDTM. Not all types of NodeSetDTMs support this
* operation
*
* @param n Node to be added
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void addNode(int n)
{
  if (m_mutable)
  {
    this.addElement(n); // delegate to the guarded vector append
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Insert a node at a given position.
*
* @param n Node to be added
* @param pos Offset at which the node is to be inserted,
* with 0 being the first position.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void insertNode(int n, int pos)
{
  if (m_mutable)
  {
    insertElementAt(n, pos); // shift everything at pos and beyond up by one
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Remove a node.
*
* @param n Node to be added
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void removeNode(int n)
{
  if (m_mutable)
  {
    this.removeElement(n); // removes the first occurrence, if present
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
// %TBD%
// /**
// * Copy NodeList members into this nodelist, adding in
// * document order. If a node is null, don't add it.
// *
// * @param nodelist List of nodes which should now be referenced by
// * this NodeSetDTM.
// * @throws RuntimeException thrown if this NodeSetDTM is not of
// * a mutable type.
// */
// public void addNodes(NodeList nodelist)
// {
//
// if (!m_mutable)
// throw new RuntimeException("This NodeSetDTM is not mutable!");
//
// if (null != nodelist) // defensive to fix a bug that Sanjiva reported.
// {
// int nChildren = nodelist.getLength();
//
// for (int i = 0; i < nChildren; i++)
// {
// int obj = nodelist.item(i);
//
// if (null != obj)
// {
// addElement(obj);
// }
// }
// }
//
// // checkDups();
// }
// %TBD%
// /**
// * <p>Copy NodeList members into this nodelist, adding in
// * document order. Only genuine node references will be copied;
// * nulls appearing in the source NodeSetDTM will
// * not be added to this one. </p>
// *
// * <p> In case you're wondering why this function is needed: NodeSetDTM
// * implements both DTMIterator and NodeList. If this method isn't
// * provided, Java can't decide which of those to use when addNodes()
// * is invoked. Providing the more-explicit match avoids that
// * ambiguity.)</p>
// *
// * @param ns NodeSetDTM whose members should be merged into this NodeSetDTM.
// * @throws RuntimeException thrown if this NodeSetDTM is not of
// * a mutable type.
// */
// public void addNodes(NodeSetDTM ns)
// {
//
// if (!m_mutable)
// throw new RuntimeException("This NodeSetDTM is not mutable!");
//
// addNodes((DTMIterator) ns);
// }
/**
* Copy NodeList members into this nodelist, adding in
* document order. Null references are not added.
*
* @param iterator DTMIterator which yields the nodes to be added.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void addNodes(DTMIterator iterator)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  // A null iterator is tolerated and treated as an empty source.
  if (null == iterator)
    return;
  // Append in iterator order; no document-order insertion, no dedup.
  for (int handle = iterator.nextNode(); DTM.NULL != handle; handle = iterator.nextNode())
  {
    addElement(handle);
  }
}
// %TBD%
// /**
// * Copy NodeList members into this nodelist, adding in
// * document order. If a node is null, don't add it.
// *
// * @param nodelist List of nodes to be added
// * @param support The XPath runtime context.
// * @throws RuntimeException thrown if this NodeSetDTM is not of
// * a mutable type.
// */
// public void addNodesInDocOrder(NodeList nodelist, XPathContext support)
// {
//
// if (!m_mutable)
// throw new RuntimeException("This NodeSetDTM is not mutable!");
//
// int nChildren = nodelist.getLength();
//
// for (int i = 0; i < nChildren; i++)
// {
// int node = nodelist.item(i);
//
// if (null != node)
// {
// addNodeInDocOrder(node, support);
// }
// }
// }
/**
* Copy NodeList members into this nodelist, adding in
* document order. If a node is null, don't add it.
*
* @param iterator DTMIterator which yields the nodes to be added.
* @param support The XPath runtime context.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void addNodesInDocOrder(DTMIterator iterator, XPathContext support)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  // Pull every node from the iterator, placing each at its document-order slot.
  for (int handle = iterator.nextNode(); DTM.NULL != handle; handle = iterator.nextNode())
  {
    addNodeInDocOrder(handle, support);
  }
}
// %TBD%
// /**
// * Add the node list to this node set in document order.
// *
// * @param start index.
// * @param end index.
// * @param testIndex index.
// * @param nodelist The nodelist to add.
// * @param support The XPath runtime context.
// *
// * @return false always.
// * @throws RuntimeException thrown if this NodeSetDTM is not of
// * a mutable type.
// */
// private boolean addNodesInDocOrder(int start, int end, int testIndex,
// NodeList nodelist, XPathContext support)
// {
//
// if (!m_mutable)
// throw new RuntimeException("This NodeSetDTM is not mutable!");
//
// boolean foundit = false;
// int i;
// int node = nodelist.item(testIndex);
//
// for (i = end; i >= start; i--)
// {
// int child = elementAt(i);
//
// if (child == node)
// {
// i = -2; // Duplicate, suppress insert
//
// break;
// }
//
// if (!support.getDOMHelper().isNodeAfter(node, child))
// {
// insertElementAt(node, i + 1);
//
// testIndex--;
//
// if (testIndex > 0)
// {
// boolean foundPrev = addNodesInDocOrder(0, i, testIndex, nodelist,
// support);
//
// if (!foundPrev)
// {
// addNodesInDocOrder(i, size() - 1, testIndex, nodelist, support);
// }
// }
//
// break;
// }
// }
//
// if (i == -1)
// {
// insertElementAt(node, 0);
// }
//
// return foundit;
// }
/**
* Add the node into a vector of nodes where it should occur in
* document order.
* @param node The node to be added.
* @param test true if we should test for doc order
* @param support The XPath runtime context.
* @return insertIndex.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
/**
 * Add the node into a vector of nodes where it should occur in
 * document order.
 *
 * @param node The node to be added.
 * @param test true if we should test for doc order (duplicates suppressed);
 * false appends to the end unless the handle is already present.
 * @param support The XPath runtime context, used to resolve the node's DTM.
 * @return insertIndex — the position chosen for the node; unchanged from the
 * pre-existing size in the no-test path when the node is a duplicate.
 * @throws RuntimeException thrown if this NodeSetDTM is not of
 * a mutable type.
 */
public int addNodeInDocOrder(int node, boolean test, XPathContext support)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!");
  int insertIndex = -1;
  if (test)
  {
    // This needs to do a binary search, but a binary search
    // is somewhat tough because the sequence test involves
    // two nodes.
    int size = size(), i;
    // The DTM owning 'node' is loop-invariant; look it up once.
    DTM dtm = support.getDTM(node);
    for (i = size - 1; i >= 0; i--)
    {
      int child = elementAt(i);
      if (child == node)
      {
        i = -2; // Duplicate, suppress insert
        break;
      }
      if (!dtm.isNodeAfter(node, child))
      {
        break;
      }
    }
    if (i != -2)
    {
      insertIndex = i + 1;
      insertElementAt(node, insertIndex);
    }
  }
  else
  {
    insertIndex = this.size();
    boolean foundit = false;
    for (int i = 0; i < insertIndex; i++)
    {
      // Fix: compare the stored node handle against 'node'; the previous code
      // compared the loop index itself (i == node), so duplicate suppression
      // only worked by coincidence when a handle equaled its own position.
      if (elementAt(i) == node)
      {
        foundit = true;
        break;
      }
    }
    if (!foundit)
      addElement(node);
  }
  // checkDups();
  return insertIndex;
} // end addNodeInDocOrder(Vector v, Object obj)
/**
* Add the node into a vector of nodes where it should occur in
* document order.
* @param node The node to be added.
* @param support The XPath runtime context.
*
* @return The index where it was inserted.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public int addNodeInDocOrder(int node, XPathContext support)
{
  if (m_mutable)
  {
    // Convenience overload: always test for document order.
    return addNodeInDocOrder(node, true, support);
  }
  throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
} // end addNodeInDocOrder(Vector v, Object obj)
/**
* Get the length of the list.
*
* @return The size of this node set.
*/
public int size()
{
  // Straight delegation to the underlying NodeVector count.
  return super.size();
}
/**
* Append a Node onto the vector.
*
* @param value The node to be added.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void addElement(int value)
{
  if (m_mutable)
  {
    super.addElement(value); // raw append; no ordering or dedup
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Inserts the specified node in this vector at the specified index.
* Each component in this vector with an index greater or equal to
* the specified index is shifted upward to have an index one greater
* than the value it had previously.
*
* @param value The node to be inserted.
* @param at The index where the insert should occur.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void insertElementAt(int value, int at)
{
  if (m_mutable)
  {
    super.insertElementAt(value, at); // shifts elements at 'at' and beyond up
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Append the nodes to the list.
*
* @param nodes The nodes to be appended to this node set.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void appendNodes(NodeVector nodes)
{
  if (m_mutable)
  {
    super.appendNodes(nodes); // bulk append of another vector's contents
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
 * Remove all nodes from this set, leaving it empty.
 * (The previous javadoc here was a copy-paste of insertElementAt's.)
 *
 * @throws RuntimeException thrown if this NodeSetDTM is not of
 * a mutable type.
 */
public void removeAllElements()
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!");
  super.removeAllElements();
}
/**
* Removes the first occurrence of the argument from this vector.
* If the object is found in this vector, each component in the vector
* with an index greater or equal to the object's index is shifted
* downward to have an index one smaller than the value it had
* previously.
*
* @param s The node to be removed.
*
* @return True if the node was successfully removed
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public boolean removeElement(int s)
{
  if (m_mutable)
  {
    // Removes the first occurrence; later elements shift down by one.
    return super.removeElement(s);
  }
  throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
}
/**
* Deletes the component at the specified index. Each component in
* this vector with an index greater or equal to the specified
* index is shifted downward to have an index one smaller than
* the value it had previously.
*
* @param i The index of the node to be removed.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void removeElementAt(int i)
{
  if (m_mutable)
  {
    super.removeElementAt(i); // later elements shift down by one
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Sets the component at the specified index of this vector to be the
* specified object. The previous component at that position is discarded.
*
* The index must be a value greater than or equal to 0 and less
* than the current size of the vector.
*
* @param node The node to be set.
* @param index The index of the node to be replaced.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void setElementAt(int node, int index)
{
  if (m_mutable)
  {
    super.setElementAt(node, index); // overwrite in place; index must be valid
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Same as setElementAt.
*
* @param node The node to be set.
* @param index The index of the node to be replaced.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a mutable type.
*/
public void setItem(int node, int index)
{
  if (m_mutable)
  {
    // Same contract as setElementAt(); kept separate for the DTMIterator API.
    super.setElementAt(node, index);
  }
  else
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null));
  }
}
/**
* Get the nth element.
*
* @param i The index of the requested node.
*
* @return Node at specified index.
*/
public int elementAt(int i)
{
  runTo(i); // side effect: moves the iteration cursor (m_next)
  return super.elementAt(i);
}
/**
* Tell if the table contains the given node.
*
* @param s Node to look for
*
* @return True if the given node was found.
*/
public boolean contains(int s)
{
  runTo(-1); // materialize the whole set before searching
  return super.contains(s);
}
/**
 * Searches for the first occurrence of the given argument,
 * beginning the search at index, and testing for equality
 * using the equals method.
 *
 * @param elem Node to look for
 * @param index Index of where to start the search
 * @return the index of the first occurrence of the object
 * argument in this vector at position index or later in the
 * vector; returns -1 if the object is not found.
 */
public int indexOf(int elem, int index)
{
  runTo(-1); // materialize the whole set before searching
  return super.indexOf(elem, index);
}
/**
 * Searches for the first occurrence of the given argument,
 * starting from the beginning of the vector and testing for
 * equality using the equals method.
 *
 * @param elem Node to look for
 * @return the index of the first occurrence of the object
 * argument in this vector; returns -1 if the object is not found.
 */
public int indexOf(int elem)
{
  runTo(-1); // materialize the whole set before searching
  return super.indexOf(elem);
}
/** If this node is being used as an iterator, the next index that nextNode()
 * will return. (Cursor state; reset() rewinds it to 0.) */
transient protected int m_next = 0;
/**
* Get the current position, which is one less than
* the next nextNode() call will retrieve. i.e. if
* you call getCurrentPos() and the return is 0, the next
* fetch will take place at index 1.
*
* @return The the current position index.
*/
public int getCurrentPos()
{
  // m_next is the index the NEXT nextNode() call will fetch.
  return m_next;
}
/**
* Set the current position in the node set.
* @param i Must be a valid index.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a cached type, and thus doesn't permit indexed access.
*/
public void setCurrentPos(int i)
{
  if (m_cacheNodes)
  {
    m_next = i; // a position is only meaningful when nodes are cached
  }
  else
  {
    throw new RuntimeException(
        XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_INDEX, null));
  }
}
/**
* Return the last fetched node. Needed to support the UnionPathIterator.
*
* @return the last fetched node.
* @throws RuntimeException thrown if this NodeSetDTM is not of
* a cached type, and thus doesn't permit indexed access.
*/
/**
 * Return the last fetched node. Needed to support the UnionPathIterator.
 *
 * @return the last fetched node, or DTM.NULL if none is available.
 * @throws RuntimeException thrown if this NodeSetDTM is not of
 * a cached type, and thus doesn't permit indexed access.
 */
public int getCurrentNode()
{
  if (!m_cacheNodes)
    throw new RuntimeException(
      // Fix: use the localized message factory, consistent with every other
      // guard in this class, instead of a hardcoded English string.
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_INDEX, null)); //"This NodeSetDTM can not do indexing or counting functions!");
  int saved = m_next;
  // because nextNode always increments
  // But watch out for copy29, where the root iterator didn't
  // have nextNode called on it.
  int current = (m_next > 0) ? m_next-1 : m_next;
  int n = (current < m_firstFree) ? elementAt(current) : DTM.NULL;
  m_next = saved; // restore: elementAt() -> runTo() may have moved the cursor
  return n;
}
/** True if this list can be mutated. */
transient protected boolean m_mutable = true;
/** True if this list is cached. (The field is transient, so the former
 * {@code @serial} tag did not apply — it is never serialized.) */
transient protected boolean m_cacheNodes = true;
/** The root of the iteration, if available. */
protected int m_root = DTM.NULL;
/**
* Get whether or not this is a cached node set.
*
*
* @return True if this list is cached.
*/
public boolean getShouldCacheNodes()
{
  return m_cacheNodes; // initialized to true by the field declaration
}
/**
 * If setShouldCacheNodes(true) is called, then nodes will
 * be cached. Note that in this implementation nodes are cached by
 * default (m_cacheNodes is initialized to true). This switch must
 * be set before the first call to nextNode is made, to ensure
 * that all nodes are cached.
 *
 * @param b true if this node set should be cached.
 * @throws RuntimeException thrown if an attempt is made to
 * request caching after we've already begun stepping through the
 * nodes in this set.
 */
public void setShouldCacheNodes(boolean b)
{
  if (!isFresh())
    throw new RuntimeException(
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_CANNOT_CALL_SETSHOULDCACHENODE, null)); //"Can not call setShouldCacheNodes after nextNode has been called!");
  m_cacheNodes = b;
  m_mutable = true;
}
/**
* Tells if this iterator can have nodes added to it or set via
* the <code>setItem(int node, int index)</code> method.
*
* @return True if the nodelist can be mutated.
*/
public boolean isMutable()
{
  // Simple flag accessor; see m_mutable.
  return m_mutable;
}
/** Cached "last" index for getLast()/setLast(); not serialized. */
transient private int m_last = 0;
/** @return the value most recently stored via {@link #setLast(int)}. */
public int getLast()
{
  return m_last;
}
/** @param last the last-node index to remember. */
public void setLast(int last)
{
  m_last = last;
}
/**
* Returns true if all the nodes in the iteration well be returned in document
* order.
*
* @return true as a default.
*/
public boolean isDocOrdered()
{
  return true; // true as a default; see the javadoc above
}
/**
* Returns the axis being iterated, if it is known.
*
* @return Axis.CHILD, etc., or -1 if the axis is not known or is of multiple
* types.
*/
public int getAxis()
{
  return -1; // axis unknown or of multiple types for a generic node set
}
}
|
apache/tajo | 34,916 | tajo-core-tests/src/test/java/org/apache/tajo/engine/planner/TestUniformRangePartition.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.engine.planner;
import org.apache.tajo.catalog.Schema;
import org.apache.tajo.catalog.SchemaBuilder;
import org.apache.tajo.catalog.SortSpec;
import org.apache.tajo.common.TajoDataTypes.Type;
import org.apache.tajo.datum.DatumFactory;
import org.apache.tajo.plan.util.PlannerUtil;
import org.apache.tajo.storage.BaseTupleComparator;
import org.apache.tajo.storage.Tuple;
import org.apache.tajo.storage.TupleRange;
import org.apache.tajo.storage.VTuple;
import org.junit.Test;
import java.math.BigInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestUniformRangePartition {
@Test
public void testPartitionForINT2Asc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.INT2)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

  // Overall range: [1, 30000], ascending INT2.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createInt2((short) 1));
  end.put(0, DatumFactory.createInt2((short) 30000));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
@Test
public void testPartitionForINT2Desc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.INT2)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  sortSpecs[0].setDescOrder();

  // Overall range: [30000, 1], descending INT2.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createInt2((short) 30000));
  end.put(0, DatumFactory.createInt2((short) 1));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
@Test
public void testPartitionForINT4Asc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.INT4)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

  // Overall range: [1, 10000], ascending INT4.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createInt4(1));
  end.put(0, DatumFactory.createInt4(10000));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
@Test
public void testPartitionForINT4Desc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.INT4).build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  sortSpecs[0].setDescOrder();

  // Overall range: [10000, 1], descending INT4.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createInt4(10000));
  end.put(0, DatumFactory.createInt4(1));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
@Test
public void testPartitionForINT8Asc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.INT8).build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

  // Overall range: [1, 10000], ascending INT8.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createInt8(1));
  end.put(0, DatumFactory.createInt8(10000));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
@Test
public void testPartitionForInt8Desc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.INT8)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  sortSpecs[0].setDescOrder();

  // Overall range: [10000, 1], descending INT8.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createInt8(10000));
  end.put(0, DatumFactory.createInt8(1));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
@Test
public void testPartitionForFloat4Asc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.FLOAT4)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);

  // Overall range: [1.0, 10000.0], ascending FLOAT4.
  VTuple start = new VTuple(1);
  VTuple end = new VTuple(1);
  start.put(0, DatumFactory.createFloat4((float) 1.0));
  end.put(0, DatumFactory.createFloat4((float) 10000.0));
  TupleRange totalRange = new TupleRange(sortSpecs, start, end);

  final int partNum = 64;
  UniformRangePartition partitioner = new UniformRangePartition(totalRange, sortSpecs);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Partitions must come back in strictly increasing order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(start));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Partitioning a single descending FLOAT4 column from 10000.0 down to 1.0. */
@Test
public void testPartitionForFloat4Desc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.FLOAT4).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  keys[0].setDescOrder();

  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createFloat4(10000.0f));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createFloat4(1.0f));

  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must be strictly increasing per TupleRange.compareTo.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Partitioning a single ascending FLOAT8 column over [1.0, 10000.0]. */
@Test
public void testPartitionForFloat8Asc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.FLOAT8).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);

  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createFloat8(1.0));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createFloat8(10000.0));

  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Partitioning a single descending FLOAT8 column from 10000.0 down to 1.0. */
@Test
public void testPartitionForFloat8Desc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.FLOAT8)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  sortSpecs[0].setDescOrder();
  VTuple s = new VTuple(1);
  VTuple e = new VTuple(1);
  // FIX: removed the spurious (float) casts — createFloat8 takes a double, and
  // the narrowing cast was inconsistent with testPartitionForFloat8Asc and could
  // lose precision for other literals.
  s.put(0, DatumFactory.createFloat8(10000.0));
  e.put(0, DatumFactory.createFloat8(1.0));
  TupleRange expected = new TupleRange(sortSpecs, s, e);
  UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
  int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);
  // Sub-ranges must come back in strictly ascending order.
  TupleRange prev = null;
  for (TupleRange r : ranges) {
    if (prev != null) {
      assertTrue(prev.compareTo(r) < 0);
    }
    prev = r;
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(s));
  assertTrue(ranges[partNum - 1].getEnd().equals(e));
}
/**
 * Verifies increment and single-digit overflow in the normal case: stepping one
 * value at a time must enumerate the whole (l_returnflag, l_linestatus) space
 * in lexicographic order.
 */
@Test
public void testIncrementOfText() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  VTuple s = new VTuple(2);
  s.put(0, DatumFactory.createText("A"));
  s.put(1, DatumFactory.createText("A"));
  VTuple e = new VTuple(2);
  e.put(0, DatumFactory.createText("D"));
  e.put(1, DatumFactory.createText("C"));
  TupleRange expected = new TupleRange(sortSpecs, s, e);
  UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
  // 4 first-column values (A..D) x 3 second-column values (A..C).
  assertEquals(12, partitioner.getTotalCardinality().intValue());

  // Full expected enumeration; result[0] ("AA") is the start tuple itself.
  String[] result = {
      "AA", "AB", "AC",
      "BA", "BB", "BC",
      "CA", "CB", "CC",
      "DA", "DB", "DC"
  };
  Tuple end = partitioner.increment(s, BigInteger.valueOf(1), 1);
  assertEquals("A", end.getText(0));
  assertEquals("B", end.getText(1));
  // FIX: iterate to result.length; the previous bound (i < 11) silently skipped
  // the final value "DC", so the last increment/overflow was never verified.
  // (testIncrementOfText2 shows incrementing up to "DC" is a valid operation.)
  for (int i = 2; i < result.length; i++) {
    end = partitioner.increment(end, BigInteger.valueOf(1), 1);
    assertEquals(result[i].charAt(0), end.getText(0).charAt(0));
    assertEquals(result[i].charAt(1), end.getText(1).charAt(0));
  }
}
/**
 * Verifies overflow when the increment exceeds the cardinality of the last
 * digit, so the carry must propagate into the first column.
 */
@Test
public void testIncrementOfText2() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  VTuple s = new VTuple(2);
  s.put(0, DatumFactory.createText("A"));
  s.put(1, DatumFactory.createText("A"));
  VTuple e = new VTuple(2);
  e.put(0, DatumFactory.createText("D"));
  e.put(1, DatumFactory.createText("C"));
  TupleRange expected = new TupleRange(sortSpecs, s, e);
  UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
  // 4 first-column values (A..D) x 3 second-column values (A..C).
  assertEquals(12, partitioner.getTotalCardinality().intValue());
  // FIX: removed the unused 12-element "result" array (dead code); the two
  // increments below assert the expected tuples directly.
  // "AA" + 6 = two full second-column cycles -> "CA".
  Tuple end = partitioner.increment(s, BigInteger.valueOf(6), 1);
  assertEquals("C", end.getText(0));
  assertEquals("A", end.getText(1));
  // "CA" + 5 lands exactly on the range end "DC" (index 11 of 12).
  end = partitioner.increment(end, BigInteger.valueOf(5), 1);
  assertEquals("D", end.getText(0));
  assertEquals("C", end.getText(1));
}
/**
 * Verifies the case where two or more digits overflow at once: one step past
 * the last (second, third)-column combination must carry into the first column.
 */
@Test
public void testIncrementOfText3() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .add("final", Type.TEXT)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(3);
  begin.put(0, DatumFactory.createText("A"));
  begin.put(1, DatumFactory.createText("A"));
  begin.put(2, DatumFactory.createText("A"));
  VTuple end = new VTuple(3);
  end.put(0, DatumFactory.createText("D")); // 4 values
  end.put(1, DatumFactory.createText("B")); // 2 values
  end.put(2, DatumFactory.createText("C")); // 3 values -> 24 combinations
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  assertEquals(24, partitioner.getTotalCardinality().intValue());

  // "ABC" is the last tuple before the two lower digits overflow together.
  Tuple overflowBefore = partitioner.increment(begin, BigInteger.valueOf(5), 2);
  assertEquals("A", overflowBefore.getText(0));
  assertEquals("B", overflowBefore.getText(1));
  assertEquals("C", overflowBefore.getText(2));
  // One more step carries into the first column: "BAA".
  Tuple overflowed = partitioner.increment(overflowBefore, BigInteger.valueOf(1), 2);
  assertEquals("B", overflowed.getText(0));
  assertEquals("A", overflowed.getText(1));
  assertEquals("A", overflowed.getText(2));
}
/** Repeated large increments over a Unicode text range must stay strictly increasing. */
@Test
public void testIncrementOfUnicode() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("가가가"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("하하하"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  BaseTupleComparator comp = new BaseTupleComparator(schema, keys);

  // Jump 30000 values ahead 100 times; each result must compare greater than the last.
  Tuple prevTuple = null;
  Tuple tuple = begin;
  for (int i = 0; i < 100; i++) {
    tuple = partitioner.increment(tuple, BigInteger.valueOf(30000), 0);
    if (prevTuple != null) {
      assertTrue("prev=" + prevTuple + ", current=" + tuple, comp.compare(prevTuple, tuple) < 0);
    }
    prevTuple = tuple;
  }
}
/** A single partition over a one-character Unicode range must be the whole range. */
@Test
public void testIncrementOfUnicodeOneCharSinglePartition() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("가"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("다"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 1;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Ordering check (trivially holds for a single range).
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Eight partitions over a one-character Unicode range must be ordered and covering. */
@Test
public void testIncrementOfUnicodeOneCharMultiPartition() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("가"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("꽥"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 8;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Partitioning an ascending multi-character Unicode text range. */
@Test
public void testPartitionForUnicodeTextAsc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("가가가"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("하하하"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Ascending Unicode partitioning where the BEGIN key is shorter than the end key. */
@Test
public void testPartitionForUnicodeDiffLenBeginTextAsc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("가"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("하하하"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Ascending Unicode partitioning where the END key is shorter than the begin key. */
@Test
public void testPartitionForUnicodeDiffLenEndTextAsc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("가가가"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("하"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Partitioning a descending multi-character Unicode text range. */
@Test
public void testPartitionForUnicodeTextDesc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  keys[0].setDescOrder();
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("하하하"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("가가가"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must be strictly increasing per TupleRange.compareTo.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Descending Unicode partitioning where the BEGIN key is shorter than the end key. */
@Test
public void testPartitionForUnicodeDiffLenBeginTextDesc() {
  Schema schema = SchemaBuilder.builder().add("col1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  keys[0].setDescOrder();
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("하"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("가가가"));
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  final int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must be strictly increasing per TupleRange.compareTo.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Descending Unicode partitioning where the END key is shorter than the begin key. */
@Test
public void testPartitionForUnicodeDiffLenEndTextDesc() {
  Schema schema = SchemaBuilder.builder()
      .add("col1", Type.TEXT)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  sortSpecs[0].setDescOrder();
  VTuple s = new VTuple(1);
  VTuple e = new VTuple(1);
  // FIX: this test used the exact data of testPartitionForUnicodeDiffLenBeginTextDesc
  // (short BEGIN key), so the "different-length END key" case was never exercised.
  // Use the shorter END key, mirroring testPartitionForUnicodeDiffLenEndTextAsc
  // under descending order.
  s.put(0, DatumFactory.createText("하하하"));
  e.put(0, DatumFactory.createText("가"));
  TupleRange expected = new TupleRange(sortSpecs, s, e);
  UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
  int partNum = 64;
  TupleRange[] ranges = partitioner.partition(partNum);
  // Sub-ranges must be strictly increasing per TupleRange.compareTo.
  TupleRange prev = null;
  for (TupleRange r : ranges) {
    if (prev != null) {
      assertTrue(prev.compareTo(r) < 0);
    }
    prev = r;
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(s));
  assertTrue(ranges[partNum - 1].getEnd().equals(e));
}
/** Verifies multi-column INT8 increments, including carry into the first column. */
@Test
public void testIncrementOfInt8() {
  Schema schema = SchemaBuilder.builder()
      .add("l_orderkey", Type.INT8)
      .add("l_linenumber", Type.INT8)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  VTuple s = new VTuple(2);
  s.put(0, DatumFactory.createInt8(10));
  s.put(1, DatumFactory.createInt8(20));
  VTuple e = new VTuple(2);
  e.put(0, DatumFactory.createInt8(19));
  e.put(1, DatumFactory.createInt8(39));
  TupleRange expected = new TupleRange(sortSpecs, s, e);
  UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
  // 10 first-column values x 20 second-column values.
  assertEquals(200, partitioner.getTotalCardinality().longValue());
  // FIX: read the INT8 columns with getInt8 (matching testIncrementOfInt8AndFinal)
  // instead of relying on getInt4's implicit down-conversion.
  // +100 with a second-column cardinality of 20 advances the first column by 5.
  Tuple range2 = partitioner.increment(s, BigInteger.valueOf(100), 1);
  assertEquals(15, range2.getInt8(0));
  assertEquals(20, range2.getInt8(1));
  // +99 more lands exactly on the range end (index 199 of 200).
  Tuple range3 = partitioner.increment(range2, BigInteger.valueOf(99), 1);
  assertEquals(19, range3.getInt8(0));
  assertEquals(39, range3.getInt8(1));
}
/** Verifies that a step past the last (second, third)-column INT8 pair carries into column one. */
@Test
public void testIncrementOfInt8AndFinal() {
  Schema schema = SchemaBuilder.builder()
      .add("l_orderkey", Type.INT8)
      .add("l_linenumber", Type.INT8)
      .add("final", Type.INT8)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(3);
  begin.put(0, DatumFactory.createInt8(1));
  begin.put(1, DatumFactory.createInt8(1));
  begin.put(2, DatumFactory.createInt8(1));
  VTuple end = new VTuple(3);
  end.put(0, DatumFactory.createInt8(4)); // 4 values
  end.put(1, DatumFactory.createInt8(2)); // 2 values
  end.put(2, DatumFactory.createInt8(3)); // 3 values -> 24 combinations
  UniformRangePartition partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys);
  assertEquals(24, partitioner.getTotalCardinality().longValue());

  // (1,2,3) is the last tuple before both lower digits overflow together.
  Tuple beforeOverflow = partitioner.increment(begin, BigInteger.valueOf(5), 2);
  assertEquals(1, beforeOverflow.getInt8(0));
  assertEquals(2, beforeOverflow.getInt8(1));
  assertEquals(3, beforeOverflow.getInt8(2));
  // One more step carries into the first column: (2,1,1).
  Tuple overflow = partitioner.increment(beforeOverflow, BigInteger.valueOf(1), 2);
  assertEquals(2, overflow.getInt8(0));
  assertEquals(1, overflow.getInt8(1));
  assertEquals(1, overflow.getInt8(2));
}
/** Verifies multi-column FLOAT8 increments, including carry into the first column. */
@Test
public void testIncrementOfFloat8() {
  Schema schema = SchemaBuilder.builder()
      .add("l_orderkey", Type.FLOAT8)
      .add("l_linenumber", Type.FLOAT8)
      .add("final", Type.FLOAT8)
      .build();
  SortSpec[] sortSpecs = PlannerUtil.schemaToSortSpecs(schema);
  VTuple s = new VTuple(3);
  s.put(0, DatumFactory.createFloat8(1.1d));
  s.put(1, DatumFactory.createFloat8(1.1d));
  s.put(2, DatumFactory.createFloat8(1.1d));
  VTuple e = new VTuple(3);
  e.put(0, DatumFactory.createFloat8(4.1d)); // 4
  e.put(1, DatumFactory.createFloat8(2.1d)); // 2
  e.put(2, DatumFactory.createFloat8(3.1d)); // x3 = 24
  TupleRange expected = new TupleRange(sortSpecs, s, e);
  UniformRangePartition partitioner = new UniformRangePartition(expected, sortSpecs);
  assertEquals(24, partitioner.getTotalCardinality().longValue());
  // FIX: use assertEquals with an exact (0.0) delta instead of assertTrue(x == y)
  // so a failure reports the expected and actual values.
  Tuple beforeOverflow = partitioner.increment(s, BigInteger.valueOf(5), 2);
  assertEquals(1.1d, beforeOverflow.getFloat8(0), 0.0);
  assertEquals(2.1d, beforeOverflow.getFloat8(1), 0.0);
  assertEquals(3.1d, beforeOverflow.getFloat8(2), 0.0);
  // One more step carries into the first column.
  Tuple overflow = partitioner.increment(beforeOverflow, BigInteger.valueOf(1), 2);
  assertEquals(2.1d, overflow.getFloat8(0), 0.0);
  assertEquals(1.1d, overflow.getFloat8(1), 0.0);
  assertEquals(1.1d, overflow.getFloat8(2), 0.0);
}
/** Two-column text partitioning into 31 parts must yield strictly ordered sub-ranges. */
@Test
public void testPartition() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(2);
  begin.put(0, DatumFactory.createText("A"));
  begin.put(1, DatumFactory.createText("F"));
  VTuple end = new VTuple(2);
  end.put(0, DatumFactory.createText("R"));
  end.put(1, DatumFactory.createText("O"));
  RangePartitionAlgorithm partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys, true);
  TupleRange[] ranges = partitioner.partition(31);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
}
/** Asking for a single partition must return the whole input range unchanged. */
@Test
public void testPartitionForOnePartNum() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(2);
  begin.put(0, DatumFactory.createText("A"));
  begin.put(1, DatumFactory.createText("F"));
  VTuple end = new VTuple(2);
  end.put(0, DatumFactory.createText("R"));
  end.put(1, DatumFactory.createText("O"));
  TupleRange wholeRange = new TupleRange(keys, begin, end);
  RangePartitionAlgorithm partitioner = new UniformRangePartition(wholeRange, keys, true);
  TupleRange[] ranges = partitioner.partition(1);
  assertEquals(wholeRange, ranges[0]);
}
/** A single partition must be returned unchanged even when one range endpoint column is null. */
@Test
public void testPartitionForOnePartNumWithOneOfTheValueNull() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(2);
  begin.put(0, DatumFactory.createNullDatum());
  begin.put(1, DatumFactory.createText("F"));
  VTuple end = new VTuple(2);
  end.put(0, DatumFactory.createText("R"));
  end.put(1, DatumFactory.createNullDatum());
  TupleRange wholeRange = new TupleRange(keys, begin, end);
  RangePartitionAlgorithm partitioner = new UniformRangePartition(wholeRange, keys, true);
  TupleRange[] ranges = partitioner.partition(1);
  assertEquals(wholeRange, ranges[0]);
}
/** Partitioning a multi-character ASCII text range ["AAA", "ZZZ"] into 48 parts. */
@Test
public void testPartitionForMultipleChars() {
  Schema schema = SchemaBuilder.builder().add("KEY1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("AAA"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("ZZZ"));
  RangePartitionAlgorithm partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys, true);
  TupleRange[] ranges = partitioner.partition(48);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(i + "th, prev=" + ranges[i - 1] + ",cur=" + ranges[i],
        ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(48, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[47].getEnd().equals(end));
}
/** Partitioning text values of different lengths that share a common prefix ("A"). */
@Test
public void testPartitionForMultipleChars2() {
  Schema schema = SchemaBuilder.builder().add("KEY1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("A1"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("A999975"));
  final int partNum = 2;
  RangePartitionAlgorithm partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys, true);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Descending variant of the shared-prefix, different-length text partitioning test. */
@Test
public void testPartitionForMultipleChars2Desc() {
  Schema schema = SchemaBuilder.builder().add("KEY1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  keys[0].setDescOrder();
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("A999975"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("A1"));
  final int partNum = 48;
  RangePartitionAlgorithm partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys, true);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must be strictly increasing per TupleRange.compareTo.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** Partitioning text values that differ only in the last character ("AAA".."AAZ"). */
@Test
public void testPartitionForMultipleCharsWithSameFirstChar() {
  Schema schema = SchemaBuilder.builder().add("KEY1", Type.TEXT).build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(1);
  begin.put(0, DatumFactory.createText("AAA"));
  VTuple end = new VTuple(1);
  end.put(0, DatumFactory.createText("AAZ"));
  final int partNum = 4;
  RangePartitionAlgorithm partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys, true);
  TupleRange[] ranges = partitioner.partition(partNum);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
  assertEquals(partNum, ranges.length);
  assertTrue(ranges[0].getStart().equals(begin));
  assertTrue(ranges[partNum - 1].getEnd().equals(end));
}
/** A single partition must be returned unchanged even when every endpoint column is null. */
@Test
public void testPartitionForOnePartNumWithBothValueNull() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(2);
  begin.put(0, DatumFactory.createNullDatum());
  begin.put(1, DatumFactory.createNullDatum());
  VTuple end = new VTuple(2);
  end.put(0, DatumFactory.createNullDatum());
  end.put(1, DatumFactory.createNullDatum());
  TupleRange wholeRange = new TupleRange(keys, begin, end);
  RangePartitionAlgorithm partitioner = new UniformRangePartition(wholeRange, keys, true);
  TupleRange[] ranges = partitioner.partition(1);
  assertEquals(wholeRange, ranges[0]);
}
/** Partitioning must still order sub-ranges when the first sort column is null at both ends. */
@Test
public void testPartitionWithNull() {
  Schema schema = SchemaBuilder.builder()
      .add("l_returnflag", Type.TEXT)
      .add("l_linestatus", Type.TEXT)
      .build();
  SortSpec[] keys = PlannerUtil.schemaToSortSpecs(schema);
  VTuple begin = new VTuple(2);
  begin.put(0, DatumFactory.createNullDatum());
  begin.put(1, DatumFactory.createText("F"));
  VTuple end = new VTuple(2);
  end.put(0, DatumFactory.createNullDatum());
  end.put(1, DatumFactory.createText("O"));
  RangePartitionAlgorithm partitioner =
      new UniformRangePartition(new TupleRange(keys, begin, end), keys, true);
  TupleRange[] ranges = partitioner.partition(10);

  // Sub-ranges must come back in strictly ascending order.
  for (int i = 1; i < ranges.length; i++) {
    assertTrue(ranges[i - 1].compareTo(ranges[i]) < 0);
  }
}
}
// ==== End of TestUniformRangePartition chunk. The content below belongs to a
// different file: BatchCreateResourceValueConfigsResponse.java (generated
// protobuf code from googleapis/google-cloud-java, java-securitycenter). ====
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1;
/**
 *
 *
 * <pre>
 * Response message for BatchCreateResourceValueConfigs
 * </pre>
 *
 * Protobuf type {@code google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse}
 */
public final class BatchCreateResourceValueConfigsResponse
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse)
    BatchCreateResourceValueConfigsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BatchCreateResourceValueConfigsResponse.newBuilder() to construct.
  private BatchCreateResourceValueConfigsResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; starts with no configs.
  private BatchCreateResourceValueConfigsResponse() {
    resourceValueConfigs_ = java.util.Collections.emptyList();
  }

  // Protobuf runtime hook (reflection/serialization): creates an empty instance.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchCreateResourceValueConfigsResponse();
  }
  // Returns the message descriptor declared in securitycenter_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_BatchCreateResourceValueConfigsResponse_descriptor;
  }

  // Wires up reflective field access for this generated message class.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_BatchCreateResourceValueConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse.class,
            com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse.Builder
                .class);
  }
  public static final int RESOURCE_VALUE_CONFIGS_FIELD_NUMBER = 1;

  // Backing list for the repeated resource_value_configs field; populated at
  // build/parse time and never mutated afterwards.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.securitycenter.v1.ResourceValueConfig>
      resourceValueConfigs_;

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.securitycenter.v1.ResourceValueConfig>
      getResourceValueConfigsList() {
    return resourceValueConfigs_;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder>
      getResourceValueConfigsOrBuilderList() {
    return resourceValueConfigs_;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  @java.lang.Override
  public int getResourceValueConfigsCount() {
    return resourceValueConfigs_.size();
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.ResourceValueConfig getResourceValueConfigs(int index) {
    return resourceValueConfigs_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder
      getResourceValueConfigsOrBuilder(int index) {
    return resourceValueConfigs_.get(index);
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the repeated resource_value_configs field (tag 1), then any
  // unknown fields carried over from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < resourceValueConfigs_.size(); i++) {
      output.writeMessage(1, resourceValueConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the serialized byte size, memoizing the result in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < resourceValueConfigs_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, resourceValueConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over the resource_value_configs list and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse other =
        (com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse) obj;
    if (!getResourceValueConfigsList().equals(other.getResourceValueConfigsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash code consistent with equals(); memoized in memoizedHashCode
  // (0 is used as the "not yet computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getResourceValueConfigsCount() > 0) {
      hash = (37 * hash) + RESOURCE_VALUE_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getResourceValueConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points, one per supported input form
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // an optional ExtensionRegistryLite overload.
  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. newBuilder()/toBuilder() are the public entry
// points; the protected parent-scoped overload is used internally when this
// message is nested inside another builder.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty Builder; any other instance is
  // copied into the new Builder via mergeFrom.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Response message for BatchCreateResourceValueConfigs
 * </pre>
 *
 * Protobuf type {@code google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse)
    com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponseOrBuilder {

  // Descriptor for this message type (reflection support).
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_BatchCreateResourceValueConfigsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_BatchCreateResourceValueConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse.class,
            com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse.Builder
                .class);
  }

  // Construct using
  // com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse.newBuilder()
  private Builder() {}

  // Parent-scoped constructor used when this builder is nested in another.
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets the builder to its default (empty) state. When a nested field
  // builder exists, the list state lives in it, so the local list is nulled
  // and the field builder is cleared instead.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (resourceValueConfigsBuilder_ == null) {
      resourceValueConfigs_ = java.util.Collections.emptyList();
    } else {
      resourceValueConfigs_ = null;
      resourceValueConfigsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.securitycenter.v1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1_BatchCreateResourceValueConfigsResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      getDefaultInstanceForType() {
    return com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
        .getDefaultInstance();
  }

  // Builds the message, throwing if required fields are unset (this message
  // has none, so isInitialized() is always true in practice).
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse build() {
    com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse result =
        buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Builds the message without the initialization check.
  @java.lang.Override
  public com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
      buildPartial() {
    com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse result =
        new com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Transfers the repeated field into the result. If the list is still owned
  // locally (bit 0x1 set), it is frozen with unmodifiableList first so the
  // built message is immutable.
  private void buildPartialRepeatedFields(
      com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse result) {
    if (resourceValueConfigsBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        resourceValueConfigs_ = java.util.Collections.unmodifiableList(resourceValueConfigs_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.resourceValueConfigs_ = resourceValueConfigs_;
    } else {
      result.resourceValueConfigs_ = resourceValueConfigsBuilder_.build();
    }
  }

  // Generated hook for singular fields; this message has none, so the local
  // is intentionally unused.
  private void buildPartial0(
      com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse result) {
    int from_bitField0_ = bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic merge: dispatches to the typed overload when possible, otherwise
  // falls back to the reflection-based merge in the superclass.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other
        instanceof com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse) {
      return mergeFrom(
          (com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge. For the repeated field: if this builder's list is empty it
  // adopts the other message's (immutable) list by reference; otherwise the
  // elements are appended to a mutable copy. The field-builder branch mirrors
  // the same logic through the RepeatedFieldBuilderV3.
  public Builder mergeFrom(
      com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse other) {
    if (other
        == com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
            .getDefaultInstance()) return this;
    if (resourceValueConfigsBuilder_ == null) {
      if (!other.resourceValueConfigs_.isEmpty()) {
        if (resourceValueConfigs_.isEmpty()) {
          resourceValueConfigs_ = other.resourceValueConfigs_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureResourceValueConfigsIsMutable();
          resourceValueConfigs_.addAll(other.resourceValueConfigs_);
        }
        onChanged();
      }
    } else {
      if (!other.resourceValueConfigs_.isEmpty()) {
        if (resourceValueConfigsBuilder_.isEmpty()) {
          resourceValueConfigsBuilder_.dispose();
          resourceValueConfigsBuilder_ = null;
          resourceValueConfigs_ = other.resourceValueConfigs_;
          bitField0_ = (bitField0_ & ~0x00000001);
          resourceValueConfigsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getResourceValueConfigsFieldBuilder()
                  : null;
        } else {
          resourceValueConfigsBuilder_.addAllMessages(other.resourceValueConfigs_);
        }
      }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  // No required fields in this message.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: tag 10 (field 1, length-delimited) is a
  // ResourceValueConfig submessage; tag 0 means end of stream; anything else
  // is preserved as an unknown field.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.securitycenter.v1.ResourceValueConfig m =
                  input.readMessage(
                      com.google.cloud.securitycenter.v1.ResourceValueConfig.parser(),
                      extensionRegistry);
              if (resourceValueConfigsBuilder_ == null) {
                ensureResourceValueConfigsIsMutable();
                resourceValueConfigs_.add(m);
              } else {
                resourceValueConfigsBuilder_.addMessage(m);
              }
              break;
            } // case 10
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Bit 0x00000001 tracks whether resourceValueConfigs_ is a locally-owned
  // mutable list (set) or a shared/immutable one (clear).
  private int bitField0_;

  private java.util.List<com.google.cloud.securitycenter.v1.ResourceValueConfig>
      resourceValueConfigs_ = java.util.Collections.emptyList();

  // Copy-on-write: replaces the (possibly shared) list with a private
  // ArrayList before the first mutation.
  private void ensureResourceValueConfigsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      resourceValueConfigs_ =
          new java.util.ArrayList<com.google.cloud.securitycenter.v1.ResourceValueConfig>(
              resourceValueConfigs_);
      bitField0_ |= 0x00000001;
    }
  }

  // Lazily-created nested builder; once it exists it owns the list state and
  // resourceValueConfigs_ is nulled out.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.securitycenter.v1.ResourceValueConfig,
          com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder,
          com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder>
      resourceValueConfigsBuilder_;

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public java.util.List<com.google.cloud.securitycenter.v1.ResourceValueConfig>
      getResourceValueConfigsList() {
    if (resourceValueConfigsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(resourceValueConfigs_);
    } else {
      return resourceValueConfigsBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public int getResourceValueConfigsCount() {
    if (resourceValueConfigsBuilder_ == null) {
      return resourceValueConfigs_.size();
    } else {
      return resourceValueConfigsBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public com.google.cloud.securitycenter.v1.ResourceValueConfig getResourceValueConfigs(
      int index) {
    if (resourceValueConfigsBuilder_ == null) {
      return resourceValueConfigs_.get(index);
    } else {
      return resourceValueConfigsBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder setResourceValueConfigs(
      int index, com.google.cloud.securitycenter.v1.ResourceValueConfig value) {
    if (resourceValueConfigsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.set(index, value);
      onChanged();
    } else {
      resourceValueConfigsBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder setResourceValueConfigs(
      int index, com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder builderForValue) {
    if (resourceValueConfigsBuilder_ == null) {
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.set(index, builderForValue.build());
      onChanged();
    } else {
      resourceValueConfigsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder addResourceValueConfigs(
      com.google.cloud.securitycenter.v1.ResourceValueConfig value) {
    if (resourceValueConfigsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.add(value);
      onChanged();
    } else {
      resourceValueConfigsBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder addResourceValueConfigs(
      int index, com.google.cloud.securitycenter.v1.ResourceValueConfig value) {
    if (resourceValueConfigsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.add(index, value);
      onChanged();
    } else {
      resourceValueConfigsBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder addResourceValueConfigs(
      com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder builderForValue) {
    if (resourceValueConfigsBuilder_ == null) {
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.add(builderForValue.build());
      onChanged();
    } else {
      resourceValueConfigsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder addResourceValueConfigs(
      int index, com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder builderForValue) {
    if (resourceValueConfigsBuilder_ == null) {
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.add(index, builderForValue.build());
      onChanged();
    } else {
      resourceValueConfigsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder addAllResourceValueConfigs(
      java.lang.Iterable<? extends com.google.cloud.securitycenter.v1.ResourceValueConfig>
          values) {
    if (resourceValueConfigsBuilder_ == null) {
      ensureResourceValueConfigsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, resourceValueConfigs_);
      onChanged();
    } else {
      resourceValueConfigsBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder clearResourceValueConfigs() {
    if (resourceValueConfigsBuilder_ == null) {
      resourceValueConfigs_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      resourceValueConfigsBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public Builder removeResourceValueConfigs(int index) {
    if (resourceValueConfigsBuilder_ == null) {
      ensureResourceValueConfigsIsMutable();
      resourceValueConfigs_.remove(index);
      onChanged();
    } else {
      resourceValueConfigsBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder
      getResourceValueConfigsBuilder(int index) {
    return getResourceValueConfigsFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder
      getResourceValueConfigsOrBuilder(int index) {
    if (resourceValueConfigsBuilder_ == null) {
      return resourceValueConfigs_.get(index);
    } else {
      return resourceValueConfigsBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public java.util.List<? extends com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder>
      getResourceValueConfigsOrBuilderList() {
    if (resourceValueConfigsBuilder_ != null) {
      return resourceValueConfigsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(resourceValueConfigs_);
    }
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder
      addResourceValueConfigsBuilder() {
    return getResourceValueConfigsFieldBuilder()
        .addBuilder(com.google.cloud.securitycenter.v1.ResourceValueConfig.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder
      addResourceValueConfigsBuilder(int index) {
    return getResourceValueConfigsFieldBuilder()
        .addBuilder(
            index, com.google.cloud.securitycenter.v1.ResourceValueConfig.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * The resource value configs created
   * </pre>
   *
   * <code>
   * repeated .google.cloud.securitycenter.v1.ResourceValueConfig resource_value_configs = 1;
   * </code>
   */
  public java.util.List<com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder>
      getResourceValueConfigsBuilderList() {
    return getResourceValueConfigsFieldBuilder().getBuilderList();
  }

  // Creates the field builder on first use; from then on it owns the list
  // state (the local reference is released).
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.securitycenter.v1.ResourceValueConfig,
          com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder,
          com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder>
      getResourceValueConfigsFieldBuilder() {
    if (resourceValueConfigsBuilder_ == null) {
      resourceValueConfigsBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.securitycenter.v1.ResourceValueConfig,
              com.google.cloud.securitycenter.v1.ResourceValueConfig.Builder,
              com.google.cloud.securitycenter.v1.ResourceValueConfigOrBuilder>(
              resourceValueConfigs_,
              ((bitField0_ & 0x00000001) != 0),
              getParentForChildren(),
              isClean());
      resourceValueConfigs_ = null;
    }
    return resourceValueConfigsBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse)

// Singleton default (all-fields-empty) instance, created in a static
// initializer so class loading fully constructs it before first use.
private static final com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse();
}

public static com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Shared parser: parses by funnelling the stream through a Builder and
// attaching the partially-built message to any parse exception.
private static final com.google.protobuf.Parser<BatchCreateResourceValueConfigsResponse> PARSER =
    new com.google.protobuf.AbstractParser<BatchCreateResourceValueConfigsResponse>() {
      @java.lang.Override
      public BatchCreateResourceValueConfigsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<BatchCreateResourceValueConfigsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<BatchCreateResourceValueConfigsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.securitycenter.v1.BatchCreateResourceValueConfigsResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== end of generated file: BatchCreateResourceValueConfigsResponse.java ====
// Next file (googleapis/google-cloud-java):
// java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ExamplesRestrictionsNamespace.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/explanation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Restrictions namespace for example-based explanations overrides.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace}
*/
public final class ExamplesRestrictionsNamespace extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace)
ExamplesRestrictionsNamespaceOrBuilder {
private static final long serialVersionUID = 0L;

// Use ExamplesRestrictionsNamespace.newBuilder() to construct.
private ExamplesRestrictionsNamespace(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor: all fields start at their proto3 defaults
// (empty string / empty lists).
private ExamplesRestrictionsNamespace() {
  namespaceName_ = "";
  allow_ = com.google.protobuf.LazyStringArrayList.emptyList();
  deny_ = com.google.protobuf.LazyStringArrayList.emptyList();
}

// Runtime hook used by the protobuf library to create new instances
// reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ExamplesRestrictionsNamespace();
}

// Descriptor for this message type (reflection support).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.ExplanationProto
      .internal_static_google_cloud_aiplatform_v1beta1_ExamplesRestrictionsNamespace_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.ExplanationProto
      .internal_static_google_cloud_aiplatform_v1beta1_ExamplesRestrictionsNamespace_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.class,
          com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.Builder.class);
}
public static final int NAMESPACE_NAME_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; the getters below lazily convert
// and cache the other representation, which is why the field is volatile.
@SuppressWarnings("serial")
private volatile java.lang.Object namespaceName_ = "";

/**
 *
 *
 * <pre>
 * The namespace name.
 * </pre>
 *
 * <code>string namespace_name = 1;</code>
 *
 * @return The namespaceName.
 */
@java.lang.Override
public java.lang.String getNamespaceName() {
  java.lang.Object ref = namespaceName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    namespaceName_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The namespace name.
 * </pre>
 *
 * <code>string namespace_name = 1;</code>
 *
 * @return The bytes for namespaceName.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNamespaceNameBytes() {
  java.lang.Object ref = namespaceName_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    namespaceName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int ALLOW_FIELD_NUMBER = 2;

// Repeated string field "allow" (field 2); LazyStringArrayList defers UTF-8
// decoding of individual elements until they are accessed as Strings.
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList allow_ =
    com.google.protobuf.LazyStringArrayList.emptyList();

/**
 *
 *
 * <pre>
 * The list of allowed tags.
 * </pre>
 *
 * <code>repeated string allow = 2;</code>
 *
 * @return A list containing the allow.
 */
public com.google.protobuf.ProtocolStringList getAllowList() {
  return allow_;
}

/**
 *
 *
 * <pre>
 * The list of allowed tags.
 * </pre>
 *
 * <code>repeated string allow = 2;</code>
 *
 * @return The count of allow.
 */
public int getAllowCount() {
  return allow_.size();
}

/**
 *
 *
 * <pre>
 * The list of allowed tags.
 * </pre>
 *
 * <code>repeated string allow = 2;</code>
 *
 * @param index The index of the element to return.
 * @return The allow at the given index.
 */
public java.lang.String getAllow(int index) {
  return allow_.get(index);
}

/**
 *
 *
 * <pre>
 * The list of allowed tags.
 * </pre>
 *
 * <code>repeated string allow = 2;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the allow at the given index.
 */
public com.google.protobuf.ByteString getAllowBytes(int index) {
  return allow_.getByteString(index);
}
public static final int DENY_FIELD_NUMBER = 3;

// Repeated string field "deny" (field 3); mirrors the "allow" accessors.
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList deny_ =
    com.google.protobuf.LazyStringArrayList.emptyList();

/**
 *
 *
 * <pre>
 * The list of deny tags.
 * </pre>
 *
 * <code>repeated string deny = 3;</code>
 *
 * @return A list containing the deny.
 */
public com.google.protobuf.ProtocolStringList getDenyList() {
  return deny_;
}

/**
 *
 *
 * <pre>
 * The list of deny tags.
 * </pre>
 *
 * <code>repeated string deny = 3;</code>
 *
 * @return The count of deny.
 */
public int getDenyCount() {
  return deny_.size();
}

/**
 *
 *
 * <pre>
 * The list of deny tags.
 * </pre>
 *
 * <code>repeated string deny = 3;</code>
 *
 * @param index The index of the element to return.
 * @return The deny at the given index.
 */
public java.lang.String getDeny(int index) {
  return deny_.get(index);
}

/**
 *
 *
 * <pre>
 * The list of deny tags.
 * </pre>
 *
 * <code>repeated string deny = 3;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the deny at the given index.
 */
public com.google.protobuf.ByteString getDenyBytes(int index) {
  return deny_.getByteString(index);
}
// Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so it is always initialized once the
// memo is set.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the message in ascending field-number order (proto wire
// contract): namespace_name (1), allow (2), deny (3), then unknown fields.
// Empty strings/lists are skipped, per proto3 default-value semantics.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespaceName_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, namespaceName_);
  }
  for (int i = 0; i < allow_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, allow_.getRaw(i));
  }
  for (int i = 0; i < deny_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, deny_.getRaw(i));
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize; -1 means "not yet computed") the
// exact number of bytes writeTo will emit.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespaceName_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, namespaceName_);
  }
  {
    int dataSize = 0;
    for (int i = 0; i < allow_.size(); i++) {
      dataSize += computeStringSizeNoTag(allow_.getRaw(i));
    }
    size += dataSize;
    // One tag byte per element (field 2's tag encodes in a single byte).
    size += 1 * getAllowList().size();
  }
  {
    int dataSize = 0;
    for (int i = 0; i < deny_.size(); i++) {
      dataSize += computeStringSizeNoTag(deny_.getRaw(i));
    }
    size += dataSize;
    // One tag byte per element (field 3's tag encodes in a single byte).
    size += 1 * getDenyList().size();
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // A message is trivially equal to itself.
  if (this == obj) {
    return true;
  }
  // Objects of any other type are delegated to the superclass comparison.
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace)) {
    return super.equals(obj);
  }
  final com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace that =
      (com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace) obj;
  // Equal iff every declared field and the unknown-field set match.
  return getNamespaceName().equals(that.getNamespaceName())
      && getAllowList().equals(that.getAllowList())
      && getDenyList().equals(that.getDenyList())
      && getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
  // Reuse the memoized value; 0 means "not yet computed".
  final int cached = memoizedHashCode;
  if (cached != 0) {
    return cached;
  }
  // Seed is 41, mixed with the descriptor hash exactly as in the original
  // (41 * 19 + descriptorHash), so the resulting value is unchanged.
  int hash = (19 * 41) + getDescriptor().hashCode();
  hash = (37 * hash) + NAMESPACE_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getNamespaceName().hashCode();
  if (getAllowCount() > 0) {
    hash = (37 * hash) + ALLOW_FIELD_NUMBER;
    hash = (53 * hash) + getAllowList().hashCode();
  }
  if (getDenyCount() > 0) {
    hash = (37 * hash) + DENY_FIELD_NUMBER;
    hash = (53 * hash) + getDenyList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
// Parses a message from a ByteString, resolving extensions via the given registry.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses a message from a byte array; throws on malformed input.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses a message from a byte array, resolving extensions via the given registry.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses a message by reading the given InputStream to completion.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
// Parses a message from an InputStream, resolving extensions via the given registry.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Parses one length-delimited message (varint size prefix) from the stream.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
// Parses one length-delimited message from the stream, with an extension registry.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
// Parses a message from an already-open CodedInputStream.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
// Parses a message from a CodedInputStream, resolving extensions via the registry.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  // Instance method variant simply forwards to the static factory.
  return newBuilder();
}
// Creates a fresh builder seeded from the (empty) default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with all fields of the given prototype.
public static Builder newBuilder(
    com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance skips the mergeFrom to avoid copying empty state.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  // Used internally by the runtime to create a builder attached to a parent.
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Restrictions namespace for example-based explanations overrides.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace)
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespaceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.ExplanationProto
.internal_static_google_cloud_aiplatform_v1beta1_ExamplesRestrictionsNamespace_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.ExplanationProto
.internal_static_google_cloud_aiplatform_v1beta1_ExamplesRestrictionsNamespace_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.class,
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.Builder.class);
}
// Construct using
// com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
namespaceName_ = "";
allow_ = com.google.protobuf.LazyStringArrayList.emptyList();
deny_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.ExplanationProto
.internal_static_google_cloud_aiplatform_v1beta1_ExamplesRestrictionsNamespace_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace build() {
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace buildPartial() {
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace result =
new com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.namespaceName_ = namespaceName_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
allow_.makeImmutable();
result.allow_ = allow_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
deny_.makeImmutable();
result.deny_ = deny_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace other) {
if (other
== com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace.getDefaultInstance())
return this;
if (!other.getNamespaceName().isEmpty()) {
namespaceName_ = other.namespaceName_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.allow_.isEmpty()) {
if (allow_.isEmpty()) {
allow_ = other.allow_;
bitField0_ |= 0x00000002;
} else {
ensureAllowIsMutable();
allow_.addAll(other.allow_);
}
onChanged();
}
if (!other.deny_.isEmpty()) {
if (deny_.isEmpty()) {
deny_ = other.deny_;
bitField0_ |= 0x00000004;
} else {
ensureDenyIsMutable();
deny_.addAll(other.deny_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
namespaceName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureAllowIsMutable();
allow_.add(s);
break;
} // case 18
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
ensureDenyIsMutable();
deny_.add(s);
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object namespaceName_ = "";
/**
*
*
* <pre>
* The namespace name.
* </pre>
*
* <code>string namespace_name = 1;</code>
*
* @return The namespaceName.
*/
public java.lang.String getNamespaceName() {
java.lang.Object ref = namespaceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
namespaceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The namespace name.
* </pre>
*
* <code>string namespace_name = 1;</code>
*
* @return The bytes for namespaceName.
*/
public com.google.protobuf.ByteString getNamespaceNameBytes() {
java.lang.Object ref = namespaceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
namespaceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The namespace name.
* </pre>
*
* <code>string namespace_name = 1;</code>
*
* @param value The namespaceName to set.
* @return This builder for chaining.
*/
public Builder setNamespaceName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
namespaceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The namespace name.
* </pre>
*
* <code>string namespace_name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearNamespaceName() {
namespaceName_ = getDefaultInstance().getNamespaceName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The namespace name.
* </pre>
*
* <code>string namespace_name = 1;</code>
*
* @param value The bytes for namespaceName to set.
* @return This builder for chaining.
*/
public Builder setNamespaceNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
namespaceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList allow_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureAllowIsMutable() {
if (!allow_.isModifiable()) {
allow_ = new com.google.protobuf.LazyStringArrayList(allow_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @return A list containing the allow.
*/
public com.google.protobuf.ProtocolStringList getAllowList() {
allow_.makeImmutable();
return allow_;
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @return The count of allow.
*/
public int getAllowCount() {
return allow_.size();
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @param index The index of the element to return.
* @return The allow at the given index.
*/
public java.lang.String getAllow(int index) {
return allow_.get(index);
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @param index The index of the value to return.
* @return The bytes of the allow at the given index.
*/
public com.google.protobuf.ByteString getAllowBytes(int index) {
return allow_.getByteString(index);
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @param index The index to set the value at.
* @param value The allow to set.
* @return This builder for chaining.
*/
public Builder setAllow(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAllowIsMutable();
allow_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @param value The allow to add.
* @return This builder for chaining.
*/
public Builder addAllow(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureAllowIsMutable();
allow_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @param values The allow to add.
* @return This builder for chaining.
*/
public Builder addAllAllow(java.lang.Iterable<java.lang.String> values) {
ensureAllowIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, allow_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearAllow() {
allow_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of allowed tags.
* </pre>
*
* <code>repeated string allow = 2;</code>
*
* @param value The bytes of the allow to add.
* @return This builder for chaining.
*/
public Builder addAllowBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureAllowIsMutable();
allow_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList deny_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureDenyIsMutable() {
if (!deny_.isModifiable()) {
deny_ = new com.google.protobuf.LazyStringArrayList(deny_);
}
bitField0_ |= 0x00000004;
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @return A list containing the deny.
*/
public com.google.protobuf.ProtocolStringList getDenyList() {
deny_.makeImmutable();
return deny_;
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @return The count of deny.
*/
public int getDenyCount() {
return deny_.size();
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @param index The index of the element to return.
* @return The deny at the given index.
*/
public java.lang.String getDeny(int index) {
return deny_.get(index);
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @param index The index of the value to return.
* @return The bytes of the deny at the given index.
*/
public com.google.protobuf.ByteString getDenyBytes(int index) {
return deny_.getByteString(index);
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @param index The index to set the value at.
* @param value The deny to set.
* @return This builder for chaining.
*/
public Builder setDeny(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureDenyIsMutable();
deny_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @param value The deny to add.
* @return This builder for chaining.
*/
public Builder addDeny(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureDenyIsMutable();
deny_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @param values The deny to add.
* @return This builder for chaining.
*/
public Builder addAllDeny(java.lang.Iterable<java.lang.String> values) {
ensureDenyIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, deny_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearDeny() {
deny_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of deny tags.
* </pre>
*
* <code>repeated string deny = 3;</code>
*
* @param value The bytes of the deny to add.
* @return This builder for chaining.
*/
public Builder addDenyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureDenyIsMutable();
deny_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace)
// Singleton default (all-fields-empty) instance, created eagerly at class load.
private static final com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace();
}
// Returns the shared immutable default instance.
public static com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser used by all parseFrom overloads; delegates to Builder.mergeFrom.
private static final com.google.protobuf.Parser<ExamplesRestrictionsNamespace> PARSER =
    new com.google.protobuf.AbstractParser<ExamplesRestrictionsNamespace>() {
      @java.lang.Override
      public ExamplesRestrictionsNamespace parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially parsed message so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failure, preserving the cause and the partial message.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the message parser.
public static com.google.protobuf.Parser<ExamplesRestrictionsNamespace> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ExamplesRestrictionsNamespace> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ExamplesRestrictionsNamespace
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,162 | java-bigqueryreservation/proto-google-cloud-bigqueryreservation-v1/src/main/java/com/google/cloud/bigquery/reservation/v1/MoveAssignmentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/reservation/v1/reservation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.reservation.v1;
/**
*
*
* <pre>
* The request for
* [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment].
*
* **Note**: "bigquery.reservationAssignments.create" permission is required on
* the destination_id.
*
* **Note**: "bigquery.reservationAssignments.create" and
* "bigquery.reservationAssignments.delete" permission are required on the
* related assignee.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.MoveAssignmentRequest}
*/
public final class MoveAssignmentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.reservation.v1.MoveAssignmentRequest)
MoveAssignmentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use MoveAssignmentRequest.newBuilder() to construct.
private MoveAssignmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: all string fields start at their empty default.
private MoveAssignmentRequest() {
  name_ = "";
  destinationId_ = "";
  assignmentId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Runtime hook used by the protobuf library to allocate fresh instances.
  return new MoveAssignmentRequest();
}
// Returns the proto descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_MoveAssignmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Binds the generated message/builder classes to their reflective accessors.
  return com.google.cloud.bigquery.reservation.v1.ReservationProto
      .internal_static_google_cloud_bigquery_reservation_v1_MoveAssignmentRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.class,
          com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field currently holds a ByteString: decode it once and cache the
    // String back into the field for subsequent accesses.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    // Field currently holds a String: encode it once and cache the
    // ByteString back into the field.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int DESTINATION_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object destinationId_ = "";
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The destinationId.
*/
@java.lang.Override
public java.lang.String getDestinationId() {
  java.lang.Object ref = destinationId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString form once; store the String back.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    destinationId_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for destinationId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDestinationIdBytes() {
  java.lang.Object ref = destinationId_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String form once; store the ByteString back.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    destinationId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int ASSIGNMENT_ID_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object assignmentId_ = "";
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @return The assignmentId.
*/
@java.lang.Override
public java.lang.String getAssignmentId() {
  java.lang.Object ref = assignmentId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString form once; store the String back.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    assignmentId_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @return The bytes for assignmentId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAssignmentIdBytes() {
  java.lang.Object ref = assignmentId_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String form once; store the ByteString back.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    assignmentId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Nothing to validate for this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Fields equal to the default empty string are skipped entirely.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(destinationId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, destinationId_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assignmentId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 5, assignmentId_);
  }
  // Preserve any fields that were unknown at parse time.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is cached after first computation; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Mirrors writeTo: default-valued (empty) strings contribute no bytes.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(destinationId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, destinationId_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(assignmentId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, assignmentId_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Messages of any other type are delegated to the superclass implementation.
  if (!(obj instanceof com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest other =
      (com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest) obj;
  // Field-by-field comparison: name, destination_id, assignment_id, unknowns.
  if (!getName().equals(other.getName())) return false;
  if (!getDestinationId().equals(other.getDestinationId())) return false;
  if (!getAssignmentId().equals(other.getAssignmentId())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // The hash is cached after first computation; 0 means "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + DESTINATION_ID_FIELD_NUMBER;
  hash = (53 * hash) + getDestinationId().hashCode();
  hash = (37 * hash) + ASSIGNMENT_ID_FIELD_NUMBER;
  hash = (53 * hash) + getAssignmentId().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Parses a message from a ByteBuffer; throws on malformed input.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses a message from a ByteBuffer, resolving extensions via the given registry.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses a message from a ByteString; throws on malformed input.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses a message from a ByteString, resolving extensions via the given registry.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses a message from a byte array; throws on malformed input.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses a message from a byte array, resolving extensions via the given registry.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses a message by reading the given InputStream to completion.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
// Parses a message from an InputStream, resolving extensions via the registry.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Parses one length-delimited message (varint size prefix) from the stream.
public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
  /** Length-prefixed parsing variant that also resolves extensions via the registry. */
  public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Parses a message directly from an already-open {@code CodedInputStream}. */
  public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  /** {@code CodedInputStream} parsing variant that also resolves extensions via the registry. */
  public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder(); // Fresh builder seeded from the shared default instance.
  }
  /** Creates a new builder initialized with default (empty) field values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a builder pre-populated with {@code prototype}'s field values. */
  public static Builder newBuilder(
      com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the needless mergeFrom() when converting the all-defaults instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request for
* [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment].
*
* **Note**: "bigquery.reservationAssignments.create" permission is required on
* the destination_id.
*
* **Note**: "bigquery.reservationAssignments.create" and
* "bigquery.reservationAssignments.delete" permission are required on the
* related assignee.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.reservation.v1.MoveAssignmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.reservation.v1.MoveAssignmentRequest)
com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_MoveAssignmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_MoveAssignmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.class,
com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.Builder.class);
}
// Construct using com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and restore proto3 defaults (empty strings).
      bitField0_ = 0;
      name_ = "";
      destinationId_ = "";
      assignmentId_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.reservation.v1.ReservationProto
.internal_static_google_cloud_bigquery_reservation_v1_MoveAssignmentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest
getDefaultInstanceForType() {
return com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest build() {
com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest buildPartial() {
      com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest result =
          new com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest(this);
      // Copy field values only when at least one presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    /** Copies each field whose presence bit is set (0x1=name, 0x2=destinationId, 0x4=assignmentId). */
    private void buildPartial0(
        com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.destinationId_ = destinationId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.assignmentId_ = assignmentId_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the fast typed overload when possible; otherwise fall back to
      // reflective field-by-field merging in the superclass.
      if (other instanceof com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest) {
        return mergeFrom((com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /** Merges non-empty string fields from {@code other}; empty fields leave this builder unchanged. */
    public Builder mergeFrom(com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getDestinationId().isEmpty()) {
        destinationId_ = other.destinationId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getAssignmentId().isEmpty()) {
        assignmentId_ = other.assignmentId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Standard wire-format loop: tag = (field number << 3) | wire type,
        // so 10/26/42 are fields 1, 3 and 5 with length-delimited (string) payloads.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 26:
              {
                destinationId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 26
            case 42:
              {
                assignmentId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 42
            default:
              {
                // Unknown fields are preserved; a false return means an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may have been set.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        // First access after wire-parse: decode the UTF-8 bytes and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the assignment,
* e.g.
* `projects/myproject/locations/US/reservations/team1-prod/assignments/123`
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object destinationId_ = "";
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The destinationId.
*/
public java.lang.String getDestinationId() {
java.lang.Object ref = destinationId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
destinationId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for destinationId.
*/
public com.google.protobuf.ByteString getDestinationIdBytes() {
java.lang.Object ref = destinationId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
destinationId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The destinationId to set.
* @return This builder for chaining.
*/
public Builder setDestinationId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
destinationId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearDestinationId() {
destinationId_ = getDefaultInstance().getDestinationId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The new reservation ID, e.g.:
* `projects/myotherproject/locations/US/reservations/team2-prod`
* </pre>
*
* <code>string destination_id = 3 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for destinationId to set.
* @return This builder for chaining.
*/
public Builder setDestinationIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
destinationId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object assignmentId_ = "";
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @return The assignmentId.
*/
public java.lang.String getAssignmentId() {
java.lang.Object ref = assignmentId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
assignmentId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @return The bytes for assignmentId.
*/
public com.google.protobuf.ByteString getAssignmentIdBytes() {
java.lang.Object ref = assignmentId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
assignmentId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @param value The assignmentId to set.
* @return This builder for chaining.
*/
public Builder setAssignmentId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
assignmentId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearAssignmentId() {
assignmentId_ = getDefaultInstance().getAssignmentId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The optional assignment ID. A new assignment name is generated if this
* field is empty.
*
* This field can contain only lowercase alphanumeric characters or dashes.
* Max length is 64 characters.
* </pre>
*
* <code>string assignment_id = 5;</code>
*
* @param value The bytes for assignmentId to set.
* @return This builder for chaining.
*/
public Builder setAssignmentIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
assignmentId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.reservation.v1.MoveAssignmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.reservation.v1.MoveAssignmentRequest)
  // Shared immutable default instance (all fields empty), created eagerly at class load.
  private static final com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest();
  }
  /** Returns the shared immutable default instance. */
  public static com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /** Stateless parser singleton; on failure the partially-built message is attached to the exception. */
  private static final com.google.protobuf.Parser<MoveAssignmentRequest> PARSER =
      new com.google.protobuf.AbstractParser<MoveAssignmentRequest>() {
        @java.lang.Override
        public MoveAssignmentRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures as protocol errors, again keeping partial state.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<MoveAssignmentRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<MoveAssignmentRequest> getParserForType() {
    return PARSER; // Same singleton as parser().
  }
  @java.lang.Override
  public com.google.cloud.bigquery.reservation.v1.MoveAssignmentRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ignite | 35,245 | modules/core/src/main/java/org/apache/ignite/spi/IgniteSpiAdapter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.function.Supplier;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.DiscoveryEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.IgniteNodeAttributes;
import org.apache.ignite.internal.managers.communication.GridMessageListener;
import org.apache.ignite.internal.managers.eventstorage.GridLocalEventListener;
import org.apache.ignite.internal.processors.timeout.GridSpiTimeoutObject;
import org.apache.ignite.internal.util.IgniteExceptionRegistry;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageFactory;
import org.apache.ignite.plugin.extensions.communication.MessageFormatter;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageSerializer;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.plugin.security.SecuritySubject;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.spi.discovery.DiscoveryDataBag;
import org.apache.ignite.spi.metric.ReadOnlyMetricRegistry;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK;
import static org.apache.ignite.configuration.IgniteConfiguration.DFLT_FAILURE_DETECTION_TIMEOUT;
import static org.apache.ignite.events.EventType.EVT_NODE_JOINED;
/**
* This class provides convenient adapter for SPI implementations.
*/
public abstract class IgniteSpiAdapter implements IgniteSpi {
/** */
private ObjectName spiMBean;
/** SPI start timestamp. */
private long startTstamp;
/** */
@LoggerResource
protected IgniteLogger log;
/** Ignite instance. */
protected Ignite ignite;
/** Ignite instance name. */
protected String igniteInstanceName;
/** SPI name. */
private String name;
/** Grid SPI context. */
private volatile IgniteSpiContext spiCtx = new GridDummySpiContext(null, false, null);
/** Discovery listener. */
private GridLocalEventListener paramsLsnr;
/** Local node. */
private ClusterNode locNode;
/** Failure detection timeout usage switch. */
private boolean failureDetectionTimeoutEnabled = true;
/**
* Failure detection timeout for client nodes. Initialized with the value of
* {@link IgniteConfiguration#getClientFailureDetectionTimeout()}.
*/
private long clientFailureDetectionTimeout;
/**
* Failure detection timeout. Initialized with the value of
* {@link IgniteConfiguration#getFailureDetectionTimeout()}.
*/
private long failureDetectionTimeout;
/** Start flag to deny repeating start attempts. */
private final AtomicBoolean startedFlag = new AtomicBoolean();
/**
* Creates new adapter and initializes it from the current (this) class.
* SPI name will be initialized to the simple name of the class
* (see {@link Class#getSimpleName()}).
*/
    protected IgniteSpiAdapter() {
        // Default the SPI name to the implementing class' simple name.
        name = U.getSimpleName(getClass());
    }
/**
* Starts startup stopwatch.
*/
    protected void startStopwatch() {
        // Capture SPI start time; later reported via startInfo().
        startTstamp = U.currentTimeMillis();
    }
/**
* This method is called by built-in managers implementation to avoid
* repeating SPI start attempts.
*/
public final void onBeforeStart() {
if (!startedFlag.compareAndSet(false, true))
throw new IllegalStateException("SPI has already been started " +
"(always create new configuration instance for each starting Ignite instances) " +
"[spi=" + this + ']');
}
/**
* Checks if {@link #onBeforeStart()} has been called on this SPI instance.
*
* @return {@code True} if {@link #onBeforeStart()} has already been called.
*/
    public final boolean started() {
        // Set once by onBeforeStart(); never reset afterwards.
        return startedFlag.get();
    }
/**
* @return Local node.
*/
protected ClusterNode getLocalNode() {
if (locNode != null)
return locNode;
locNode = getSpiContext().localNode();
return locNode;
}
/** {@inheritDoc} */
    @Override public String getName() {
        return name; // Either the class' simple name or an explicitly configured one.
    }
/**
* Gets ignite instance.
*
* @return Ignite instance.
*/
    public Ignite ignite() {
        return ignite; // Injected via @IgniteInstanceResource; null until injection happens.
    }
/**
* Sets SPI name.
*
* @param name SPI name.
* @return {@code this} for chaining.
*/
    @IgniteSpiConfiguration(optional = true)
    public IgniteSpiAdapter setName(String name) {
        this.name = name;

        return this; // For setter chaining.
    }
/** {@inheritDoc} */
    @Override public final void onContextInitialized(final IgniteSpiContext spiCtx) throws IgniteSpiException {
        assert spiCtx != null;

        this.spiCtx = spiCtx;

        // Unless disabled via the system property, verify that remote nodes run a
        // compatible SPI configuration - both for nodes joining later (listener)
        // and for nodes already in the topology (loop below).
        if (!Boolean.getBoolean(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK)) {
            spiCtx.addLocalEventListener(paramsLsnr = new GridLocalEventListener() {
                @Override public void onEvent(Event evt) {
                    assert evt instanceof DiscoveryEvent : "Invalid event [expected=" + EVT_NODE_JOINED +
                        ", actual=" + evt.type() + ", evt=" + evt + ']';

                    ClusterNode node = spiCtx.node(((DiscoveryEvent)evt).eventNode().id());

                    // Node may have already left topology by the time the event fires.
                    if (node != null)
                        try {
                            checkConfigurationConsistency(spiCtx, node, false);
                            checkConfigurationConsistency0(spiCtx, node, false);
                        }
                        catch (IgniteSpiException e) {
                            // For joining nodes a failed check is only logged, not propagated.
                            U.error(log, "Spi consistency check failed [node=" + node.id() + ", spi=" + getName() + ']',
                                e);
                        }
                }
            }, EVT_NODE_JOINED);

            final Collection<ClusterNode> remotes = spiCtx.remoteNodes();

            // For pre-existing nodes the check runs synchronously and failures do propagate.
            for (ClusterNode node : remotes) {
                checkConfigurationConsistency(spiCtx, node, true);
                checkConfigurationConsistency0(spiCtx, node, true);
            }
        }

        onContextInitialized0(spiCtx);
    }
/**
* Method to be called in the end of onContextInitialized method.
*
* @param spiCtx SPI context.
* @throws IgniteSpiException In case of errors.
*/
    protected void onContextInitialized0(final IgniteSpiContext spiCtx) throws IgniteSpiException {
        // No-op: extension point for subclasses that need the live SPI context.
    }
/** {@inheritDoc} */
    @Override public final void onContextDestroyed() {
        // Give subclasses a chance to clean up first, while the real context is still set.
        onContextDestroyed0();

        if (spiCtx != null && paramsLsnr != null)
            spiCtx.removeLocalEventListener(paramsLsnr);

        ClusterNode locNode = spiCtx == null ? null : spiCtx.localNode();

        // Set dummy no-op context so late callers don't hit the destroyed one.
        spiCtx = new GridDummySpiContext(locNode, true, spiCtx);
    }
/** {@inheritDoc} */
    @Override public void onClientDisconnected(IgniteFuture<?> reconnectFut) {
        // No-op: subclasses may override to react to client disconnect.
    }
/** {@inheritDoc} */
    @Override public void onClientReconnected(boolean clusterRestarted) {
        // No-op: subclasses may override to react to client reconnect.
    }
/**
* Inject ignite instance.
*
* @param ignite Ignite instance.
*/
@IgniteInstanceResource
protected void injectResources(Ignite ignite) {
this.ignite = ignite;
if (ignite != null && igniteInstanceName == null)
igniteInstanceName = ignite.name();
}
/**
* Method to be called in the beginning of onContextDestroyed() method.
*/
    protected void onContextDestroyed0() {
        // No-op: extension point invoked before the context is swapped for a dummy.
    }
/**
* This method returns SPI internal instances that need to be injected as well.
* Usually these will be instances provided to SPI externally by user, e.g. during
* SPI configuration.
*
* @return Internal SPI objects that also need to be injected.
*/
    public Collection<Object> injectables() {
        return Collections.emptyList(); // By default the SPI exposes no extra objects for injection.
    }
/**
* Gets SPI context.
*
* @return SPI context.
*/
    public IgniteSpiContext getSpiContext() {
        // Either the live context or a no-op dummy before initialization / after destroy.
        return spiCtx;
    }
/**
* Gets Exception registry.
*
* @return Exception registry.
*/
    public IgniteExceptionRegistry getExceptionRegistry() {
        return IgniteExceptionRegistry.get(); // Shared registry obtained via static accessor.
    }
/** {@inheritDoc} */
    @Override public Map<String, Object> getNodeAttributes() throws IgniteSpiException {
        return Collections.emptyMap(); // By default the SPI contributes no node attributes.
    }
/**
* Throws exception with uniform error message if given parameter's assertion condition
* is {@code false}.
*
* @param cond Assertion condition to check.
* @param condDesc Description of failed condition. Note that this description should include
* JavaBean name of the property (<b>not</b> a variable name) as well condition in
* Java syntax like, for example:
* <pre name="code" class="java">
* ...
* assertParameter(dirPath != null, "dirPath != null");
* ...
* </pre>
* Note that in case when variable name is the same as JavaBean property you
* can just copy Java condition expression into description as a string.
* @throws IgniteSpiException Thrown if given condition is {@code false}
*/
protected final void assertParameter(boolean cond, String condDesc) throws IgniteSpiException {
if (!cond)
throw new IgniteSpiException("SPI parameter failed condition check: " + condDesc);
}
/**
* Gets uniformly formatted message for SPI start.
*
* @return Uniformly formatted message for SPI start.
*/
    protected final String startInfo() {
        // spiMBean is null here if MBean registration was skipped or disabled.
        return "SPI started ok [startMs=" + startTstamp + ", spiMBean=" + spiMBean + ']';
    }
/**
* Gets SPI startup time.
* @return Time in millis.
*/
    final long getStartTstamp() {
        return startTstamp; // Millis captured by startStopwatch(); 0 if never started.
    }
/**
* Gets uniformly format message for SPI stop.
*
* @return Uniformly format message for SPI stop.
*/
    protected final String stopInfo() {
        return "SPI stopped ok."; // Uniform message for all SPI implementations.
    }
/**
* Gets uniformed string for configuration parameter.
*
* @param name Parameter name.
* @param val Parameter value.
* @return Uniformed string for configuration parameter.
*/
protected final String configInfo(String name, Object val) {
assert name != null;
return "Using parameter [" + name + '=' + val + ']';
}
/**
* @param msg Error message.
* @param locVal Local node value.
* @return Error text.
*/
    private static String format(String msg, Object locVal) {
        // Multi-line error text showing only the local node's value.
        return msg + U.nl() +
            ">>> => Local node: " + locVal + U.nl();
    }
/**
* @param msg Error message.
* @param locVal Local node value.
* @param rmtVal Remote node value.
* @return Error text.
*/
    private static String format(String msg, Object locVal, Object rmtVal) {
        // Multi-line error text contrasting local and remote node values.
        return msg + U.nl() +
            ">>> => Local node: " + locVal + U.nl() +
            ">>> => Remote node: " + rmtVal + U.nl();
    }
/**
* Registers SPI MBean. Note that SPI can only register one MBean.
*
* @param igniteInstanceName Ignite instance name. If null, then name will be empty.
* @param impl MBean implementation.
* @param mbeanItf MBean interface (if {@code null}, then standard JMX
* naming conventions are used.
* @param <T> Type of the MBean
* @throws IgniteSpiException If registration failed.
*/
    protected final <T extends IgniteSpiManagementMBean> void registerMBean(
        String igniteInstanceName,
        T impl,
        Class<T> mbeanItf
    ) throws IgniteSpiException {
        // Silently skip when no Ignite instance is injected yet or MBeans are globally disabled.
        if (ignite == null || U.IGNITE_MBEANS_DISABLED)
            return;

        MBeanServer jmx = ignite.configuration().getMBeanServer();

        assert mbeanItf == null || mbeanItf.isInterface();
        assert jmx != null;

        try {
            // Registered under the "SPIs" group using this SPI's name; only one MBean per SPI.
            spiMBean = U.registerMBean(jmx, igniteInstanceName, "SPIs", getName(), impl, mbeanItf);

            if (log.isDebugEnabled())
                log.debug("Registered SPI MBean: " + spiMBean);
        }
        catch (JMException e) {
            throw new IgniteSpiException("Failed to register SPI MBean: " + spiMBean, e);
        }
    }
/**
* Unregisters MBean.
*
* @throws IgniteSpiException If bean could not be unregistered.
*/
    protected final void unregisterMBean() throws IgniteSpiException {
        // Unregister SPI MBean.
        if (spiMBean != null && ignite != null) {
            // spiMBean is only ever set when MBeans were enabled at registration time.
            assert !U.IGNITE_MBEANS_DISABLED;

            MBeanServer jmx = ignite.configuration().getMBeanServer();

            assert jmx != null;

            try {
                jmx.unregisterMBean(spiMBean);

                if (log.isDebugEnabled())
                    log.debug("Unregistered SPI MBean: " + spiMBean);
            }
            catch (JMException e) {
                throw new IgniteSpiException("Failed to unregister SPI MBean: " + spiMBean, e);
            }
        }
    }
/**
* @return {@code True} if node is stopping.
*/
    protected final boolean isNodeStopping() {
        // NOTE(review): after onContextDestroyed() this delegates to the dummy context,
        // which is constructed with 'true' - presumably the stopping flag; confirm.
        return spiCtx.isStopping();
    }
/**
* @return {@code true} if this check is optional.
*/
private boolean checkOptional() {
IgniteSpiConsistencyChecked ann = U.getAnnotation(getClass(), IgniteSpiConsistencyChecked.class);
return ann != null && ann.optional();
}
/**
* @return {@code true} if this check is enabled.
*/
    private boolean checkEnabled() {
        // Consistency checks run only for SPIs annotated with @IgniteSpiConsistencyChecked.
        return U.getAnnotation(getClass(), IgniteSpiConsistencyChecked.class) != null;
    }
/**
* @return {@code true} if client cluster nodes should be checked.
*/
    private boolean checkClient() {
        // Clients are checked only when the annotation explicitly requests it.
        IgniteSpiConsistencyChecked ann = U.getAnnotation(getClass(), IgniteSpiConsistencyChecked.class);

        return ann != null && ann.checkClient();
    }
    /**
     * Method which is called in the end of checkConfigurationConsistency() method. May be overridden in SPIs.
     *
     * <p>Extension hook: the base implementation intentionally does nothing.
     *
     * @param spiCtx SPI context.
     * @param node Remote node.
     * @param starting If this node is starting or not.
     * @throws IgniteSpiException in case of errors.
     */
    protected void checkConfigurationConsistency0(IgniteSpiContext spiCtx, ClusterNode node, boolean starting)
        throws IgniteSpiException {
        // No-op.
    }
    /**
     * Checks remote node SPI configuration and prints warnings if necessary.
     *
     * @param spiCtx SPI context.
     * @param node Remote node.
     * @param starting Flag indicating whether this method is called during SPI start or not.
     * @throws IgniteSpiException If check fatally failed.
     */
    @SuppressWarnings("IfMayBeConditional")
    private void checkConfigurationConsistency(IgniteSpiContext spiCtx, ClusterNode node, boolean starting)
        throws IgniteSpiException {
        assert spiCtx != null;
        assert node != null;
        /*
         * Optional SPI means that we should not print warning if SPIs are different but
         * still need to compare attributes if SPIs are the same.
         */
        boolean optional = checkOptional();
        boolean enabled = checkEnabled();
        boolean checkClient = checkClient();
        if (!enabled)
            return;
        // Skip client nodes entirely unless the annotation asks to check them.
        if (!checkClient && (getLocalNode().isClient() || node.isClient()))
            return;
        String clsAttr = createSpiAttributeName(IgniteNodeAttributes.ATTR_SPI_CLASS);
        String name = getName();
        SB sb = new SB();
        /*
         * If there are any attributes do compare class and version
         * (do not print warning for the optional SPIs).
         */
        /* Check SPI class and version. */
        String locCls = spiCtx.localNode().attribute(clsAttr);
        String rmtCls = node.attribute(clsAttr);
        assert locCls != null : "Local SPI class name attribute not found: " + clsAttr;
        boolean isSpiConsistent = false;
        String tipStr = " (fix configuration or set " +
            "-D" + IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK + "=true system property)";
        if (rmtCls == null) {
            // Remote node does not run an SPI under this name at all: fatal when this
            // node is starting and the check is mandatory, otherwise just a warning.
            if (!optional && starting)
                throw new IgniteSpiException("Remote SPI with the same name is not configured" + tipStr +
                    " [name=" + name + ", loc=" + locCls + ", locNode=" + spiCtx.localNode() + ", rmt=" + rmtCls +
                    ", rmtNode=" + node + ']');
            sb.a(format(">>> Remote SPI with the same name is not configured: " + name, locCls));
        }
        else if (!locCls.equals(rmtCls)) {
            if (!optional && starting)
                throw new IgniteSpiException("Remote SPI with the same name is of different type" + tipStr +
                    " [name=" + name + ", loc=" + locCls + ", rmt=" + rmtCls + ']');
            sb.a(format(">>> Remote SPI with the same name is of different type: " + name, locCls, rmtCls));
        }
        else
            isSpiConsistent = true;
        // It makes no sense to compare inconsistent SPIs attributes.
        if (!optional && isSpiConsistent) {
            List<String> attrs = getConsistentAttributeNames();
            // Process all SPI specific attributes.
            for (String attr : attrs) {
                // Ignore class and version attributes processed above.
                if (!attr.equals(clsAttr)) {
                    // This check is considered as optional if no attributes
                    Object rmtVal = node.attribute(attr);
                    Object locVal = spiCtx.localNode().attribute(attr);
                    if (locVal == null && rmtVal == null)
                        continue;
                    if (locVal == null || rmtVal == null || !locVal.equals(rmtVal))
                        sb.a(format(">>> Remote node has different " + getName() + " SPI attribute " +
                            attr, locVal, rmtVal));
                }
            }
        }
        if (sb.length() > 0) {
            String msg;
            // Wording differs only in whether the inconsistent node is "starting" or "joining".
            if (starting)
                msg = U.nl() + U.nl() +
                    ">>> +--------------------------------------------------------------------+" + U.nl() +
                    ">>> + Courtesy notice that starting node has inconsistent configuration. +" + U.nl() +
                    ">>> + Ignore this message if you are sure that this is done on purpose. +" + U.nl() +
                    ">>> +--------------------------------------------------------------------+" + U.nl() +
                    ">>> Remote Node ID: " + node.id().toString().toUpperCase() + U.nl() + sb;
            else
                msg = U.nl() + U.nl() +
                    ">>> +-------------------------------------------------------------------+" + U.nl() +
                    ">>> + Courtesy notice that joining node has inconsistent configuration. +" + U.nl() +
                    ">>> + Ignore this message if you are sure that this is done on purpose. +" + U.nl() +
                    ">>> +-------------------------------------------------------------------+" + U.nl() +
                    ">>> Remote Node ID: " + node.id().toString().toUpperCase() + U.nl() + sb;
            U.courtesy(log, msg);
        }
    }
    /**
     * Returns back a list of attributes that should be consistent
     * for this SPI. Consistency means that remote node has to
     * have the same attribute with the same value.
     *
     * <p>Base implementation requires no consistent attributes; SPIs override as needed.
     *
     * @return List of attribute names.
     */
    protected List<String> getConsistentAttributeNames() {
        return Collections.emptyList();
    }
    /**
     * Creates new name for the given attribute. Name contains
     * SPI name prefix.
     *
     * @param attrName SPI attribute name.
     * @return New name with SPI name prefix.
     */
    protected String createSpiAttributeName(String attrName) {
        // Prefixing ties the attribute to this particular SPI instance's name.
        return U.spiAttribute(this, attrName);
    }
    /**
     * @param obj Timeout object.
     * @see IgniteSpiContext#addTimeoutObject(IgniteSpiTimeoutObject)
     */
    protected void addTimeoutObject(IgniteSpiTimeoutObject obj) {
        // Thin delegation to the SPI context's timeout processor.
        spiCtx.addTimeoutObject(obj);
    }
    /**
     * @param obj Timeout object.
     * @see IgniteSpiContext#removeTimeoutObject(IgniteSpiTimeoutObject)
     */
    protected void removeTimeoutObject(IgniteSpiTimeoutObject obj) {
        // Thin delegation to the SPI context's timeout processor.
        spiCtx.removeTimeoutObject(obj);
    }
/**
* Initiates and checks failure detection timeout value.
*/
protected void initFailureDetectionTimeout() {
if (failureDetectionTimeoutEnabled) {
failureDetectionTimeout = ignite.configuration().getFailureDetectionTimeout();
if (failureDetectionTimeout <= 0)
throw new IgniteSpiException("Invalid failure detection timeout value: " + failureDetectionTimeout);
else if (failureDetectionTimeout <= 10)
// Because U.currentTimeInMillis() is updated once in 10 milliseconds.
log.warning("Failure detection timeout is too low, it may lead to unpredictable behaviour " +
"[failureDetectionTimeout=" + failureDetectionTimeout + ']');
else if (failureDetectionTimeout <= ignite.configuration().getMetricsUpdateFrequency())
log.warning("'IgniteConfiguration.failureDetectionTimeout' should be greater then " +
"'IgniteConfiguration.metricsUpdateFrequency' to prevent unnecessary status checking.");
}
// Intentionally compare references using '!=' below
else if (ignite.configuration().getFailureDetectionTimeout() != DFLT_FAILURE_DETECTION_TIMEOUT)
log.warning("Failure detection timeout will be ignored (one of SPI parameters has been set explicitly)");
clientFailureDetectionTimeout = ignite.configuration().getClientFailureDetectionTimeout();
if (clientFailureDetectionTimeout <= 0)
throw new IgniteSpiException("Invalid client failure detection timeout value: " +
clientFailureDetectionTimeout);
else if (clientFailureDetectionTimeout <= 10)
// Because U.currentTimeInMillis() is updated once in 10 milliseconds.
log.warning("Client failure detection timeout is too low, it may lead to unpredictable behaviour " +
"[clientFailureDetectionTimeout=" + clientFailureDetectionTimeout + ']');
if (clientFailureDetectionTimeout < ignite.configuration().getMetricsUpdateFrequency())
throw new IgniteSpiException("Inconsistent configuration " +
"('IgniteConfiguration.clientFailureDetectionTimeout' must be greater or equal to " +
"'IgniteConfiguration.metricsUpdateFrequency').");
}
    /**
     * Enables or disables failure detection timeout.
     *
     * @param enabled {@code true} if enable, {@code false} otherwise.
     */
    public void failureDetectionTimeoutEnabled(boolean enabled) {
        failureDetectionTimeoutEnabled = enabled;
    }
    /**
     * Checks whether failure detection timeout is enabled for this {@link IgniteSpi}.
     *
     * @return {@code true} if enabled, {@code false} otherwise.
     */
    public boolean failureDetectionTimeoutEnabled() {
        return failureDetectionTimeoutEnabled;
    }
    /**
     * Returns client failure detection timeout set to use for network related operations.
     *
     * @return client failure detection timeout in milliseconds or {@code 0} if the timeout is disabled.
     */
    public long clientFailureDetectionTimeout() {
        return clientFailureDetectionTimeout;
    }
    /**
     * Returns failure detection timeout set to use for network related operations.
     *
     * @return failure detection timeout in milliseconds or {@code 0} if the timeout is disabled.
     */
    public long failureDetectionTimeout() {
        return failureDetectionTimeout;
    }
    /**
     * Temporary SPI context used before the real context is injected (during SPI
     * start/stop). Most operations are no-ops or return empty/negative results;
     * message factory/formatter fall back to instances that fail fast with
     * "node is not started" errors when no real context was available.
     */
    private class GridDummySpiContext implements IgniteSpiContext {
        /** Local node, may be {@code null} before discovery provides one. */
        private final ClusterNode locNode;
        /** Node stopping flag captured at construction time. */
        private final boolean stopping;
        /** */
        private final MessageFactory msgFactory;
        /** */
        private final MessageFormatter msgFormatter;
        /**
         * Create temp SPI context.
         *
         * @param locNode Local node.
         * @param stopping Node stopping flag.
         * @param spiCtx SPI context.
         */
        GridDummySpiContext(ClusterNode locNode, boolean stopping, @Nullable IgniteSpiContext spiCtx) {
            this.locNode = locNode;
            this.stopping = stopping;
            // Borrow factory/formatter from the previous context if one exists.
            MessageFactory msgFactory0 = spiCtx != null ? spiCtx.messageFactory() : null;
            MessageFormatter msgFormatter0 = spiCtx != null ? spiCtx.messageFormatter() : null;
            if (msgFactory0 == null) {
                msgFactory0 = new MessageFactory() {
                    @Override public void register(short directType, Supplier<Message> supplier) throws IgniteException {
                        throw new IgniteException("Failed to register message, node is not started.");
                    }
                    @Override public void register(short directType, Supplier<Message> supplier,
                        MessageSerializer serializer) throws IgniteException {
                        throw new IgniteException("Failed to register message, node is not started.");
                    }
                    @Nullable @Override public Message create(short type) {
                        throw new IgniteException("Failed to read message, node is not started.");
                    }
                    // NOTE(review): message text says "register" though this is a serializer
                    // lookup - looks like copy/paste; confirm before changing the string.
                    @Override public MessageSerializer serializer(short type) {
                        throw new IgniteException("Failed to register message, node is not started.");
                    }
                };
            }
            if (msgFormatter0 == null) {
                msgFormatter0 = new MessageFormatter() {
                    @Override public MessageWriter writer(UUID rmtNodeId, MessageFactory msgFactory) {
                        throw new IgniteException("Failed to write message, node is not started.");
                    }
                    @Override public MessageReader reader(UUID rmtNodeId, MessageFactory msgFactory) {
                        throw new IgniteException("Failed to read message, node is not started.");
                    }
                };
            }
            this.msgFactory = msgFactory0;
            this.msgFormatter = msgFormatter0;
        }
        /** {@inheritDoc} */
        @Override public void addLocalEventListener(GridLocalEventListener lsnr, int... types) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public void addMessageListener(GridMessageListener lsnr, String topic) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public void addLocalMessageListener(Object topic, IgniteBiPredicate<UUID, ?> p) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public void recordEvent(Event evt) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public void registerPort(int port, IgnitePortProtocol proto) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public void deregisterPort(int port, IgnitePortProtocol proto) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public void deregisterPorts() {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public <K, V> V get(String cacheName, K key) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public <K, V> V put(String cacheName, K key, V val, long ttl) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public <K, V> V putIfAbsent(String cacheName, K key, V val, long ttl) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public <K, V> V remove(String cacheName, K key) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public <K> boolean containsKey(String cacheName, K key) {
            return false;
        }
        /** {@inheritDoc} */
        @Override public int partition(String cacheName, Object key) {
            return -1;
        }
        /** {@inheritDoc} */
        @Override public Collection<ClusterNode> nodes() {
            // Only the local node (if any) is visible through the dummy context.
            return locNode == null ? Collections.emptyList() : Collections.singletonList(locNode);
        }
        /** {@inheritDoc} */
        @Override public ClusterNode localNode() {
            return locNode;
        }
        /** {@inheritDoc} */
        @Nullable @Override public ClusterNode node(UUID nodeId) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public Collection<ClusterNode> remoteNodes() {
            return Collections.emptyList();
        }
        /** {@inheritDoc} */
        @Override public boolean pingNode(UUID nodeId) {
            // Only the local node is "pingable" from the dummy context.
            return locNode != null && nodeId.equals(locNode.id());
        }
        /** {@inheritDoc} */
        @Override public boolean removeLocalEventListener(GridLocalEventListener lsnr) {
            return false;
        }
        /** {@inheritDoc} */
        @Override public boolean isEventRecordable(int... types) {
            return true;
        }
        /** {@inheritDoc} */
        @Override public void removeLocalMessageListener(Object topic, IgniteBiPredicate<UUID, ?> p) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Override public boolean removeMessageListener(GridMessageListener lsnr, String topic) {
            return false;
        }
        /** {@inheritDoc} */
        @Override public void send(ClusterNode node, Object msg, String topic) {
            /* No-op. */
        }
        /** {@inheritDoc} */
        @Nullable @Override public IgniteNodeValidationResult validateNode(ClusterNode node) {
            return null;
        }
        /** {@inheritDoc} */
        @Nullable @Override public IgniteNodeValidationResult validateNode(ClusterNode node, DiscoveryDataBag discoData) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public Collection<SecuritySubject> authenticatedSubjects() {
            return Collections.emptyList();
        }
        /** {@inheritDoc} */
        @Override public SecuritySubject authenticatedSubject(UUID subjId) {
            return null;
        }
        /** {@inheritDoc} */
        @Override public MessageFormatter messageFormatter() {
            return msgFormatter;
        }
        /** {@inheritDoc} */
        @Override public MessageFactory messageFactory() {
            return msgFactory;
        }
        /** {@inheritDoc} */
        @Override public boolean isStopping() {
            return stopping;
        }
        /** {@inheritDoc} */
        @Override public boolean tryFailNode(UUID nodeId, @Nullable String warning) {
            return false;
        }
        /** {@inheritDoc} */
        @Override public void failNode(UUID nodeId, @Nullable String warning) {
            // No-op.
        }
        /** {@inheritDoc} */
        @Override public void addTimeoutObject(IgniteSpiTimeoutObject obj) {
            // Unlike most dummy methods, timeout objects are routed to the kernal directly.
            Ignite ignite0 = ignite;
            if (!(ignite0 instanceof IgniteKernal))
                throw new IgniteSpiException("Wrong Ignite instance is set: " + ignite0);
            ((IgniteEx)ignite0).context().timeout().addTimeoutObject(new GridSpiTimeoutObject(obj));
        }
        /** {@inheritDoc} */
        @Override public void removeTimeoutObject(IgniteSpiTimeoutObject obj) {
            Ignite ignite0 = ignite;
            if (!(ignite0 instanceof IgniteKernal))
                throw new IgniteSpiException("Wrong Ignite instance is set: " + ignite0);
            ((IgniteEx)ignite0).context().timeout().removeTimeoutObject(new GridSpiTimeoutObject(obj));
        }
        /** {@inheritDoc} */
        @Override public Map<String, Object> nodeAttributes() {
            return Collections.emptyMap();
        }
        /** {@inheritDoc} */
        @Override public boolean communicationFailureResolveSupported() {
            return false;
        }
        /** {@inheritDoc} */
        @Override public void resolveCommunicationFailure(ClusterNode node, Exception err) {
            throw new UnsupportedOperationException();
        }
        /** {@inheritDoc} */
        @Override public ReadOnlyMetricRegistry getOrCreateMetricRegistry(String name) {
            // NOTE(review): returns null rather than a registry despite the "orCreate"
            // name - callers must tolerate null while the dummy context is active.
            return null;
        }
        /** {@inheritDoc} */
        @Override public void removeMetricRegistry(String name) {
            // No-op.
        }
        /** {@inheritDoc} */
        @Override public Iterable<ReadOnlyMetricRegistry> metricRegistries() {
            return null;
        }
        /** {@inheritDoc} */
        @Override public void addMetricRegistryCreationListener(Consumer<ReadOnlyMetricRegistry> lsnr) {
            // No-op.
        }
    }
}
|
googleapis/google-cloud-java | 35,197 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CorroborateContentResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/vertex_rag_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for CorroborateContent.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CorroborateContentResponse}
*/
public final class CorroborateContentResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CorroborateContentResponse)
CorroborateContentResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CorroborateContentResponse.newBuilder() to construct.
  private CorroborateContentResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor: repeated field starts as an immutable empty list.
  private CorroborateContentResponse() {
    claims_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CorroborateContentResponse();
  }
  // Descriptor plumbing generated from google/cloud/aiplatform/v1/vertex_rag_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.VertexRagServiceProto
        .internal_static_google_cloud_aiplatform_v1_CorroborateContentResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.VertexRagServiceProto
        .internal_static_google_cloud_aiplatform_v1_CorroborateContentResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.CorroborateContentResponse.class,
            com.google.cloud.aiplatform.v1.CorroborateContentResponse.Builder.class);
  }
private int bitField0_;
public static final int CORROBORATION_SCORE_FIELD_NUMBER = 1;
private float corroborationScore_ = 0F;
  /**
   *
   *
   * <pre>
   * Confidence score of corroborating content. Value is [0,1] where 1 is the
   * most confident.
   * </pre>
   *
   * <code>optional float corroboration_score = 1;</code>
   *
   * @return Whether the corroborationScore field is set.
   */
  @java.lang.Override
  public boolean hasCorroborationScore() {
    // Presence of the optional proto3 field is tracked in bit 0 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Confidence score of corroborating content. Value is [0,1] where 1 is the
   * most confident.
   * </pre>
   *
   * <code>optional float corroboration_score = 1;</code>
   *
   * @return The corroborationScore.
   */
  @java.lang.Override
  public float getCorroborationScore() {
    return corroborationScore_;
  }
  public static final int CLAIMS_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1.Claim> claims_;
  /**
   *
   *
   * <pre>
   * Claims that are extracted from the input content and facts that support the
   * claims.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1.Claim> getClaimsList() {
    return claims_;
  }
  /**
   *
   *
   * <pre>
   * Claims that are extracted from the input content and facts that support the
   * claims.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.aiplatform.v1.ClaimOrBuilder>
      getClaimsOrBuilderList() {
    return claims_;
  }
  /**
   *
   *
   * <pre>
   * Claims that are extracted from the input content and facts that support the
   * claims.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
   */
  @java.lang.Override
  public int getClaimsCount() {
    return claims_.size();
  }
  /**
   *
   *
   * <pre>
   * Claims that are extracted from the input content and facts that support the
   * claims.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.Claim getClaims(int index) {
    return claims_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Claims that are extracted from the input content and facts that support the
   * claims.
   * </pre>
   *
   * <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ClaimOrBuilder getClaimsOrBuilder(int index) {
    return claims_.get(index);
  }
  private byte memoizedIsInitialized = -1;
  // Memoized initialization check: -1 unknown, 0 not initialized, 1 initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeFloat(1, corroborationScore_);
    }
    for (int i = 0; i < claims_.size(); i++) {
      output.writeMessage(2, claims_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Size computation mirrors writeTo() and is memoized in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeFloatSize(1, corroborationScore_);
    }
    for (int i = 0; i < claims_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, claims_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over presence bit, float bits, claims list and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.CorroborateContentResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.CorroborateContentResponse other =
        (com.google.cloud.aiplatform.v1.CorroborateContentResponse) obj;
    if (hasCorroborationScore() != other.hasCorroborationScore()) return false;
    if (hasCorroborationScore()) {
      // Bit-level float comparison so that NaN == NaN and -0.0 != 0.0, per proto semantics.
      if (java.lang.Float.floatToIntBits(getCorroborationScore())
          != java.lang.Float.floatToIntBits(other.getCorroborationScore())) return false;
    }
    if (!getClaimsList().equals(other.getClaimsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); only set fields contribute.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCorroborationScore()) {
      hash = (37 * hash) + CORROBORATION_SCORE_FIELD_NUMBER;
      hash = (53 * hash) + java.lang.Float.floatToIntBits(getCorroborationScore());
    }
    if (getClaimsCount() > 0) {
      hash = (37 * hash) + CLAIMS_FIELD_NUMBER;
      hash = (53 * hash) + getClaimsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to
  // PARSER or the GeneratedMessageV3 IO helpers.
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.CorroborateContentResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for CorroborateContent.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CorroborateContentResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CorroborateContentResponse)
com.google.cloud.aiplatform.v1.CorroborateContentResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.VertexRagServiceProto
          .internal_static_google_cloud_aiplatform_v1_CorroborateContentResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.VertexRagServiceProto
          .internal_static_google_cloud_aiplatform_v1_CorroborateContentResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.CorroborateContentResponse.class,
              com.google.cloud.aiplatform.v1.CorroborateContentResponse.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.CorroborateContentResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields; bit 1 of bitField0_ tracks mutability of the claims list.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      corroborationScore_ = 0F;
      if (claimsBuilder_ == null) {
        claims_ = java.util.Collections.emptyList();
      } else {
        claims_ = null;
        claimsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.VertexRagServiceProto
          .internal_static_google_cloud_aiplatform_v1_CorroborateContentResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CorroborateContentResponse getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.CorroborateContentResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CorroborateContentResponse build() {
      com.google.cloud.aiplatform.v1.CorroborateContentResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CorroborateContentResponse buildPartial() {
      com.google.cloud.aiplatform.v1.CorroborateContentResponse result =
          new com.google.cloud.aiplatform.v1.CorroborateContentResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the claims list (or takes it from the field builder) into the result.
    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.CorroborateContentResponse result) {
      if (claimsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          claims_ = java.util.Collections.unmodifiableList(claims_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.claims_ = claims_;
      } else {
        result.claims_ = claimsBuilder_.build();
      }
    }
    // Copies scalar fields whose presence bits are set into the result.
    private void buildPartial0(com.google.cloud.aiplatform.v1.CorroborateContentResponse result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.corroborationScore_ = corroborationScore_;
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Standard generated delegating overrides.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.CorroborateContentResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1.CorroborateContentResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Type-specific merge: copies set scalar fields and appends repeated fields.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.CorroborateContentResponse other) {
      if (other == com.google.cloud.aiplatform.v1.CorroborateContentResponse.getDefaultInstance())
        return this;
      if (other.hasCorroborationScore()) {
        setCorroborationScore(other.getCorroborationScore());
      }
      if (claimsBuilder_ == null) {
        if (!other.claims_.isEmpty()) {
          if (claims_.isEmpty()) {
            // Share the other message's immutable list until a mutation forces a copy.
            claims_ = other.claims_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureClaimsIsMutable();
            claims_.addAll(other.claims_);
          }
          onChanged();
        }
      } else {
        if (!other.claims_.isEmpty()) {
          if (claimsBuilder_.isEmpty()) {
            claimsBuilder_.dispose();
            claimsBuilder_ = null;
            claims_ = other.claims_;
            bitField0_ = (bitField0_ & ~0x00000002);
            claimsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getClaimsFieldBuilder()
                    : null;
          } else {
            claimsBuilder_.addAllMessages(other.claims_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 13 = field 1 (fixed32 float), tag 18 = field 2
    // (length-delimited Claim message); unknown tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 13:
              {
                corroborationScore_ = input.readFloat();
                bitField0_ |= 0x00000001;
                break;
              } // case 13
            case 18:
              {
                com.google.cloud.aiplatform.v1.Claim m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1.Claim.parser(), extensionRegistry);
                if (claimsBuilder_ == null) {
                  ensureClaimsIsMutable();
                  claims_.add(m);
                } else {
                  claimsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private float corroborationScore_;
/**
*
*
* <pre>
* Confidence score of corroborating content. Value is [0,1] with 1 is the
* most confidence.
* </pre>
*
* <code>optional float corroboration_score = 1;</code>
*
* @return Whether the corroborationScore field is set.
*/
@java.lang.Override
public boolean hasCorroborationScore() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Confidence score of corroborating content. Value is [0,1] with 1 is the
* most confidence.
* </pre>
*
* <code>optional float corroboration_score = 1;</code>
*
* @return The corroborationScore.
*/
@java.lang.Override
public float getCorroborationScore() {
return corroborationScore_;
}
/**
*
*
* <pre>
* Confidence score of corroborating content. Value is [0,1] with 1 is the
* most confidence.
* </pre>
*
* <code>optional float corroboration_score = 1;</code>
*
* @param value The corroborationScore to set.
* @return This builder for chaining.
*/
public Builder setCorroborationScore(float value) {
corroborationScore_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Confidence score of corroborating content. Value is [0,1] with 1 is the
* most confidence.
* </pre>
*
* <code>optional float corroboration_score = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearCorroborationScore() {
bitField0_ = (bitField0_ & ~0x00000001);
corroborationScore_ = 0F;
onChanged();
return this;
}
    // Backing list for the repeated `claims` field; starts as the shared
    // immutable empty list.
    private java.util.List<com.google.cloud.aiplatform.v1.Claim> claims_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: bit 0x00000002 of bitField0_ records whether claims_
    // is a privately owned mutable ArrayList (set) or a shared/immutable list
    // (clear). Copies the list before the first in-place mutation.
    private void ensureClaimsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        claims_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Claim>(claims_);
        bitField0_ |= 0x00000002;
      }
    }

    // Lazily created nested-builder view over claims_; once non-null, the
    // repeated field is managed through this object instead of claims_ directly.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Claim,
            com.google.cloud.aiplatform.v1.Claim.Builder,
            com.google.cloud.aiplatform.v1.ClaimOrBuilder>
        claimsBuilder_;
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Claim> getClaimsList() {
if (claimsBuilder_ == null) {
return java.util.Collections.unmodifiableList(claims_);
} else {
return claimsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public int getClaimsCount() {
if (claimsBuilder_ == null) {
return claims_.size();
} else {
return claimsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Claim getClaims(int index) {
if (claimsBuilder_ == null) {
return claims_.get(index);
} else {
return claimsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder setClaims(int index, com.google.cloud.aiplatform.v1.Claim value) {
if (claimsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureClaimsIsMutable();
claims_.set(index, value);
onChanged();
} else {
claimsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder setClaims(
int index, com.google.cloud.aiplatform.v1.Claim.Builder builderForValue) {
if (claimsBuilder_ == null) {
ensureClaimsIsMutable();
claims_.set(index, builderForValue.build());
onChanged();
} else {
claimsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder addClaims(com.google.cloud.aiplatform.v1.Claim value) {
if (claimsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureClaimsIsMutable();
claims_.add(value);
onChanged();
} else {
claimsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder addClaims(int index, com.google.cloud.aiplatform.v1.Claim value) {
if (claimsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureClaimsIsMutable();
claims_.add(index, value);
onChanged();
} else {
claimsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder addClaims(com.google.cloud.aiplatform.v1.Claim.Builder builderForValue) {
if (claimsBuilder_ == null) {
ensureClaimsIsMutable();
claims_.add(builderForValue.build());
onChanged();
} else {
claimsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder addClaims(
int index, com.google.cloud.aiplatform.v1.Claim.Builder builderForValue) {
if (claimsBuilder_ == null) {
ensureClaimsIsMutable();
claims_.add(index, builderForValue.build());
onChanged();
} else {
claimsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder addAllClaims(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Claim> values) {
if (claimsBuilder_ == null) {
ensureClaimsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, claims_);
onChanged();
} else {
claimsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder clearClaims() {
if (claimsBuilder_ == null) {
claims_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
claimsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public Builder removeClaims(int index) {
if (claimsBuilder_ == null) {
ensureClaimsIsMutable();
claims_.remove(index);
onChanged();
} else {
claimsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Claim.Builder getClaimsBuilder(int index) {
return getClaimsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public com.google.cloud.aiplatform.v1.ClaimOrBuilder getClaimsOrBuilder(int index) {
if (claimsBuilder_ == null) {
return claims_.get(index);
} else {
return claimsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.ClaimOrBuilder>
getClaimsOrBuilderList() {
if (claimsBuilder_ != null) {
return claimsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(claims_);
}
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Claim.Builder addClaimsBuilder() {
return getClaimsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.Claim.getDefaultInstance());
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public com.google.cloud.aiplatform.v1.Claim.Builder addClaimsBuilder(int index) {
return getClaimsFieldBuilder()
.addBuilder(index, com.google.cloud.aiplatform.v1.Claim.getDefaultInstance());
}
/**
*
*
* <pre>
* Claims that are extracted from the input content and facts that support the
* claims.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Claim claims = 2;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Claim.Builder> getClaimsBuilderList() {
return getClaimsFieldBuilder().getBuilderList();
}
    // Switches the `claims` field into nested-builder mode on first use: the
    // current list (and its mutability bit) is handed to the new
    // RepeatedFieldBuilderV3 and claims_ is nulled so all further access goes
    // through claimsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Claim,
            com.google.cloud.aiplatform.v1.Claim.Builder,
            com.google.cloud.aiplatform.v1.ClaimOrBuilder>
        getClaimsFieldBuilder() {
      if (claimsBuilder_ == null) {
        claimsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Claim,
                com.google.cloud.aiplatform.v1.Claim.Builder,
                com.google.cloud.aiplatform.v1.ClaimOrBuilder>(
                claims_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        claims_ = null;
      }
      return claimsBuilder_;
    }
    // Unknown-field handling is inherited unchanged from GeneratedMessageV3.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CorroborateContentResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CorroborateContentResponse)
  // Shared immutable default (all-fields-unset) instance, created eagerly at
  // class-load time.
  private static final com.google.cloud.aiplatform.v1.CorroborateContentResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.CorroborateContentResponse();
  }

  public static com.google.cloud.aiplatform.v1.CorroborateContentResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton wire-format parser. On any parse failure the partially built
  // message is attached to the thrown InvalidProtocolBufferException via
  // setUnfinishedMessage so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<CorroborateContentResponse> PARSER =
      new com.google.protobuf.AbstractParser<CorroborateContentResponse>() {
        @java.lang.Override
        public CorroborateContentResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O failures are wrapped so the parser's checked contract holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CorroborateContentResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CorroborateContentResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.CorroborateContentResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== file boundary (concatenation artifact): the following is
// googleapis/google-cloud-java:
// java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/
//   src/main/java/com/google/shopping/merchant/accounts/v1beta/ListOnlineReturnPoliciesRequest.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/online_return_policy.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Request message for the `ListOnlineReturnPolicies` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest}
*/
public final class ListOnlineReturnPoliciesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest)
ListOnlineReturnPoliciesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListOnlineReturnPoliciesRequest.newBuilder() to construct.
  private ListOnlineReturnPoliciesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor backing the default instance: string fields start empty;
  // page_size keeps its int default of 0.
  private ListOnlineReturnPoliciesRequest() {
    parent_ = "";
    pageToken_ = "";
  }

  // Allocation hook used reflectively by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListOnlineReturnPoliciesRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListOnlineReturnPoliciesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListOnlineReturnPoliciesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest.class,
com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest.Builder
.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The merchant account for which to list return policies.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The merchant account for which to list return policies.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of `OnlineReturnPolicy` resources to return.
* The service returns fewer than this value if the number of return policies
* for the given merchant is less that than the `pageSize`. The default value
* is 10. The maximum value is 100; If a value higher than the maximum is
* specified, then the `pageSize` will default to the maximum
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListOnlineReturnPolicies`
* call. Provide the page token to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `ListOnlineReturnPolicies` must match the call that provided the page
* token. The token returned as
* [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListOnlineReturnPolicies`
* call. Provide the page token to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* `ListOnlineReturnPolicies` must match the call that provided the page
* token. The token returned as
* [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields declared, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output}. Per proto3 semantics, fields
   * holding their default value ("" / 0) are skipped entirely; unknown fields
   * captured at parse time are re-emitted at the end.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}; -1 means "not computed")
   * the exact number of bytes {@link #writeTo} will produce, counting only
   * non-default fields plus any unknown fields.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality: two requests are equal iff parent, page_size, page_token
   * and the unknown-field sets all match. Non-message types defer to
   * {@code super.equals}.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest other =
        (com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash over the same fields {@link #equals} compares, mixed with the standard
   * generated-code constants (19/37/53/29) and seeded with the descriptor hash.
   * Memoized in {@code memoizedHashCode}; 0 means "not yet computed".
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the `ListOnlineReturnPolicies` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest)
com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListOnlineReturnPoliciesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListOnlineReturnPoliciesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest.class,
com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest.Builder
.class);
}
// Construct using
// com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    /** Resets every field to its default and clears all has-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.accounts.v1beta.OnlineReturnPolicyProto
.internal_static_google_shopping_merchant_accounts_v1beta_ListOnlineReturnPoliciesRequest_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
getDefaultInstanceForType() {
return com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
.getDefaultInstance();
}
    /**
     * Builds the message, throwing if it is uninitialized (never for this
     * message, which has no required fields).
     */
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest build() {
      com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds without the initialization check; fields copy only if any bit is set. */
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
        buildPartial() {
      com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest result =
          new com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each field into result only when its has-bit is set:
    // 0x1 = parent, 0x2 = page_size, 0x4 = page_token.
    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest) {
return mergeFrom(
(com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder: non-empty strings and a non-zero
     * page_size in {@code other} overwrite our values (proto3 "last one wins").
     * Merging the default instance is a no-op.
     */
    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest other) {
      if (other
          == com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
              .getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so any builder state is a valid message.
      return true;
    }
    /**
     * Parses wire-format bytes from {@code input} into this builder. Unknown
     * fields are preserved; wrapped IOExceptions are unwrapped and rethrown,
     * and {@code onChanged()} fires for whatever was consumed.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                // Field 1, wire type 2 (length-delimited): parent (UTF-8 string).
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                // Field 2, wire type 0 (varint): page_size.
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                // Field 3, wire type 2 (length-delimited): page_token (UTF-8 string).
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
// Bit mask recording which fields have been explicitly set on this builder.
private int bitField0_;

// Holds either a String or a ByteString; decoded lazily by the accessors below.
private java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The merchant account for which to list return policies.
 * Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls avoid re-decoding.
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. The merchant account for which to list return policies.
 * Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls avoid re-encoding.
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. The merchant account for which to list return policies.
 * Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
// proto3 string fields reject null rather than treating it as the default.
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The merchant account for which to list return policies.
 * Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
// Reset to the default value and clear the corresponding has-bit.
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The merchant account for which to list return policies.
 * Format: `accounts/{account}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 strings must be valid UTF-8; fail fast on malformed bytes.
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Backing field for page_size (proto field 2); 0 means "use the server default".
private int pageSize_;
/**
 *
 *
 * <pre>
 * Optional. The maximum number of `OnlineReturnPolicy` resources to return.
 * The service returns fewer than this value if the number of return policies
 * for the given merchant is less that than the `pageSize`. The default value
 * is 10. The maximum value is 100; If a value higher than the maximum is
 * specified, then the `pageSize` will default to the maximum
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum number of `OnlineReturnPolicy` resources to return.
 * The service returns fewer than this value if the number of return policies
 * for the given merchant is less that than the `pageSize`. The default value
 * is 10. The maximum value is 100; If a value higher than the maximum is
 * specified, then the `pageSize` will default to the maximum
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
// No range validation here; limits are enforced server-side (see field doc).
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum number of `OnlineReturnPolicy` resources to return.
 * The service returns fewer than this value if the number of return policies
 * for the given merchant is less that than the `pageSize`. The default value
 * is 10. The maximum value is 100; If a value higher than the maximum is
 * specified, then the `pageSize` will default to the maximum
 * </pre>
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
// Clear the has-bit and restore the proto3 int default of 0.
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
// Holds either a String or a ByteString; decoded lazily by the accessors below.
private java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListOnlineReturnPolicies`
 * call. Provide the page token to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `ListOnlineReturnPolicies` must match the call that provided the page
 * token. The token returned as
 * [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls avoid re-decoding.
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListOnlineReturnPolicies`
 * call. Provide the page token to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `ListOnlineReturnPolicies` must match the call that provided the page
 * token. The token returned as
 * [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls avoid re-encoding.
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListOnlineReturnPolicies`
 * call. Provide the page token to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `ListOnlineReturnPolicies` must match the call that provided the page
 * token. The token returned as
 * [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
// proto3 string fields reject null rather than treating it as the default.
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListOnlineReturnPolicies`
 * call. Provide the page token to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `ListOnlineReturnPolicies` must match the call that provided the page
 * token. The token returned as
 * [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
// Reset to the default value and clear the corresponding has-bit.
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListOnlineReturnPolicies`
 * call. Provide the page token to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to
 * `ListOnlineReturnPolicies` must match the call that provided the page
 * token. The token returned as
 * [nextPageToken][google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 strings must be valid UTF-8; fail fast on malformed bytes.
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
// Replaces the unknown-field set wholesale; delegates to the generated base.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
// Merges additional unknown fields into this builder; delegates to the base.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest)
// Singleton default instance: the immutable message with every field at its
// proto3 default value. Created eagerly when the class is initialized.
private static final com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest();
}
// Accessor for the shared default instance.
public static com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser: builds a new Builder per call, merges the input stream into
// it, and returns the partial result. On failure the partially-built message
// is attached to the exception so callers can inspect what was parsed.
private static final com.google.protobuf.Parser<ListOnlineReturnPoliciesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListOnlineReturnPoliciesRequest>() {
@java.lang.Override
public ListOnlineReturnPoliciesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O errors so the parser API surfaces a single exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<ListOnlineReturnPoliciesRequest> parser() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; same parser.
public com.google.protobuf.Parser<ListOnlineReturnPoliciesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; same singleton.
public com.google.shopping.merchant.accounts.v1beta.ListOnlineReturnPoliciesRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/marmotta | 35,277 | libraries/kiwi/kiwi-triplestore/src/test/java/org/apache/marmotta/kiwi/test/RepositoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.marmotta.kiwi.test;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import info.aduna.iteration.Iterations;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.marmotta.commons.sesame.repository.ResourceUtils;
import org.apache.marmotta.kiwi.config.KiWiConfiguration;
import org.apache.marmotta.kiwi.sail.KiWiStore;
import org.apache.marmotta.kiwi.test.junit.KiWiDatabaseRunner;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openrdf.model.*;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.Update;
import org.openrdf.query.UpdateExecutionException;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ConcurrentModificationException;
import java.util.List;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.is;
import static org.junit.Assume.assumeThat;
/**
* Test the Sesame repository functionality backed by the KiWi triple store.
*
* @author Sebastian Schaffert (sschaffert@apache.org)
*/
@RunWith(KiWiDatabaseRunner.class)
public class RepositoryTest {
/** Shared class logger; declared {@code final} since it is never reassigned. */
private static final Logger log = LoggerFactory.getLogger(RepositoryTest.class);

/** Repository under test; re-created for every test method in {@code initDatabase()}. */
private Repository repository;

/** Underlying KiWi store; configured to drop its tables on shutdown. */
private KiWiStore store;

/** Database configuration injected per run by the {@code KiWiDatabaseRunner}. */
private final KiWiConfiguration kiwiConfiguration;

/**
 * @param kiwiConfiguration the database configuration this test instance runs against
 */
public RepositoryTest(KiWiConfiguration kiwiConfiguration) {
    this.kiwiConfiguration = kiwiConfiguration;
}
@Before
public void initDatabase() throws RepositoryException {
// Fresh store per test; dropping tables on shutdown keeps tests isolated.
store = new KiWiStore(kiwiConfiguration);
store.setDropTablesOnShutdown(true);
repository = new SailRepository(store);
repository.initialize();
}
@After
public void dropDatabase() throws RepositoryException, SQLException {
// Shutting down also drops the KiWi tables (see setDropTablesOnShutdown above).
repository.shutDown();
}
/**
 * Test importing data; the test will load a small sample RDF file and check whether the expected resources are
 * present.
 *
 * <p>Fixes over the original: the resource {@code InputStream} is now closed, and the
 * verification connection is closed in a {@code finally} block so a failing assertion
 * no longer leaks it.
 *
 * @throws RepositoryException
 * @throws RDFParseException
 * @throws IOException
 */
@Test
public void testImport() throws RepositoryException, RDFParseException, IOException {
    long start, end;

    start = System.currentTimeMillis();
    // load demo data
    InputStream rdfXML = this.getClass().getResourceAsStream("demo-data.foaf");
    assumeThat("Could not load test-data: demo-data.foaf", rdfXML, notNullValue(InputStream.class));

    RepositoryConnection connectionRDF = repository.getConnection();
    try {
        connectionRDF.add(rdfXML, "http://localhost/foaf/", RDFFormat.RDFXML);
        connectionRDF.commit();
    } finally {
        connectionRDF.close();
        rdfXML.close(); // previously leaked
    }
    end = System.currentTimeMillis();

    log.info("IMPORT: {} ms", end - start);

    start = System.currentTimeMillis();

    // get another connection and check if demo data is available
    RepositoryConnection connection = repository.getConnection();
    try {
        List<String> resources = ImmutableList.copyOf(
                Iterables.transform(
                        ResourceUtils.listResources(connection),
                        new Function<Resource, String>() {
                            @Override
                            public String apply(Resource input) {
                                return input.stringValue();
                            }
                        }
                )
        );

        // test if the result has the expected size
        //FIXME: this test is no longer valid, because resource existance is not bound to use as subject
        //Assert.assertEquals(4, resources.size());

        // test if the result contains all resources that have been used as subject
        Assert.assertThat(resources, hasItems(
                "http://localhost:8080/LMF/resource/hans_meier",
                "http://localhost:8080/LMF/resource/sepp_huber",
                "http://localhost:8080/LMF/resource/anna_schmidt"
        ));
        connection.commit();
    } finally {
        connection.close(); // close even when an assertion above fails
    }
    end = System.currentTimeMillis();

    log.info("QUERY EVALUATION: {} ms", end - start);
}
// TODO: test delete, test query,
/**
 * Test setting, retrieving and updating namespaces through the repository API.
 *
 * <p>Fix over the original: the connection is closed in a {@code finally} block so a
 * failing assertion no longer leaks it.
 *
 * @throws RepositoryException
 */
@Test
@SuppressWarnings("unchecked")
public void testNamespaces() throws RepositoryException {
    RepositoryConnection connection = repository.getConnection();
    try {
        connection.begin();
        connection.setNamespace("ns1","http://localhost/ns1/");
        connection.setNamespace("ns2","http://localhost/ns2/");
        connection.commit();

        Assert.assertEquals("http://localhost/ns1/", connection.getNamespace("ns1"));
        Assert.assertEquals("http://localhost/ns2/", connection.getNamespace("ns2"));
        Assert.assertEquals(2, Iterations.asList(connection.getNamespaces()).size());
        Assert.assertThat(
                Iterations.asList(connection.getNamespaces()),
                CoreMatchers.<Namespace>hasItems(
                        hasProperty("name", is("http://localhost/ns1/")),
                        hasProperty("name", is("http://localhost/ns2/"))
                )
        );

        // update ns1 to a different URL
        connection.begin();
        connection.setNamespace("ns1","http://localhost/ns3/");
        connection.commit();

        Assert.assertEquals("http://localhost/ns3/", connection.getNamespace("ns1"));
        Assert.assertThat(
                Iterations.asList(connection.getNamespaces()),
                CoreMatchers.<Namespace>hasItems(
                        hasProperty("name", is("http://localhost/ns3/")),
                        hasProperty("name", is("http://localhost/ns2/"))
                )
        );

        // remove ns2
        connection.begin();
        connection.removeNamespace("ns2");
        connection.commit();

        connection.begin();
        Assert.assertEquals(1, Iterations.asList(connection.getNamespaces()).size());
        connection.commit();
    } finally {
        connection.close();
    }
}
/**
 * Load the demo data, remove one resource together with all its triples, and
 * verify that the store shrinks and no longer lists the removed resource as a
 * subject.
 */
@Test
public void testDeleteTriple() throws RepositoryException, RDFParseException, IOException {
// load demo data
InputStream rdfXML = this.getClass().getResourceAsStream("demo-data.foaf");
assumeThat("Could not load test-data: demo-data.foaf", rdfXML, notNullValue(InputStream.class));
RepositoryConnection connectionRDF = repository.getConnection();
try {
connectionRDF.add(rdfXML, "http://localhost/foaf/", RDFFormat.RDFXML);
connectionRDF.commit();
} finally {
connectionRDF.close();
}
// get another connection and check if demo data is available
RepositoryConnection connection = repository.getConnection();
try {
connection.begin();
List<String> resources = ImmutableList.copyOf(
Iterables.transform(
ResourceUtils.listResources(connection),
new Function<Resource, String>() {
@Override
public String apply(Resource input) {
return input.stringValue();
}
}
)
);
// test if the result has the expected size
// FIXME: MARMOTTA-39 (no xsd:string, so one resource is "missing")
// Assert.assertEquals(31, resources.size());
Assert.assertEquals(30, resources.size());
// test if the result contains all resources that have been used as subject
Assert.assertThat(resources, hasItems(
"http://localhost:8080/LMF/resource/hans_meier",
"http://localhost:8080/LMF/resource/sepp_huber",
"http://localhost:8080/LMF/resource/anna_schmidt"
));
long oldsize = connection.size();
connection.commit();
// remove a resource and all its triples
connection.begin();
ResourceUtils.removeResource(connection, connection.getValueFactory().createURI("http://localhost:8080/LMF/resource/hans_meier"));
connection.commit();
connection.begin();
long newsize = connection.size();
// new size should be less, since we removed some triples
Assert.assertThat(newsize, lessThan(oldsize));
// the resource hans_meier should not be contained in the list of resources
List<String> resources2 = ImmutableList.copyOf(
Iterables.transform(
ResourceUtils.listSubjects(connection),
new Function<Resource, String>() {
@Override
public String apply(Resource input) {
return input.stringValue();
}
}
)
);
// test if the result has the expected size
//Assert.assertEquals(3, resources2.size());
// test if the result does not contain the removed resource
Assert.assertThat(resources2, not(hasItem(
"http://localhost:8080/LMF/resource/hans_meier"
)));
} finally {
connection.commit();
connection.close();
}
}
/**
 * Test a repeated addition of the same triple, because this is a special case in the database.
 * Importing the same file twice must leave both the statement list and the
 * repository size unchanged (no duplicate triples).
 */
@Test
public void testRepeatedAdd() throws RepositoryException, IOException, RDFParseException {
// load demo data
InputStream rdfXML = this.getClass().getResourceAsStream("srfg-ontology.rdf");
assumeThat("Could not load test-data: srfg-ontology.rdf", rdfXML, notNullValue(InputStream.class));
long oldsize, newsize;
List<Statement> oldTriples, newTriples;
RepositoryConnection connectionRDF = repository.getConnection();
try {
connectionRDF.begin();
connectionRDF.add(rdfXML, "http://localhost/srfg/", RDFFormat.RDFXML);
connectionRDF.commit();
// snapshot the state after the first import for comparison below
oldTriples = Iterations.asList(connectionRDF.getStatements(null,null,null,true));
oldsize = connectionRDF.size();
} finally {
connectionRDF.close();
}
// get another connection and add the same data again
rdfXML = this.getClass().getResourceAsStream("srfg-ontology.rdf");
RepositoryConnection connection = repository.getConnection();
try {
connection.begin();
connection.add(rdfXML, "http://localhost/srfg/", RDFFormat.RDFXML);
connection.commit();
newTriples = Iterations.asList(connection.getStatements(null,null,null,true));
newsize = connection.size();
} finally {
connection.commit();
connection.close();
}
// re-importing identical data must not change the repository contents
Assert.assertEquals(oldTriples,newTriples);
Assert.assertEquals(oldsize,newsize);
}
/**
 * Test adding-deleting-adding a triple
 *
 * Each phase uses its own connection (and an equal but distinct Literal
 * instance) so that add/remove/re-add are exercised across transactions.
 *
 * @throws Exception
 */
@Test
public void testRepeatedAddRemove() throws Exception {
// Random names avoid collisions with data left over from other tests.
String value = RandomStringUtils.randomAlphanumeric(8);
URI subject = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
URI predicate = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
Literal object1 = repository.getValueFactory().createLiteral(value);
// phase 1: add the triple
RepositoryConnection connection1 = repository.getConnection();
try {
connection1.add(subject,predicate,object1);
connection1.commit();
Assert.assertTrue(connection1.hasStatement(subject,predicate,object1,true));
connection1.commit();
} finally {
connection1.close();
}
// phase 2: remove it again (using an equal literal created separately)
Literal object2 = repository.getValueFactory().createLiteral(value);
RepositoryConnection connection2 = repository.getConnection();
try {
Assert.assertTrue(connection2.hasStatement(subject,predicate,object2,true));
connection2.remove(subject,predicate,object2);
connection2.commit();
Assert.assertFalse(connection2.hasStatement(subject,predicate,object2,true));
connection2.commit();
} finally {
connection2.close();
}
// phase 3: re-add the same triple
Literal object3 = repository.getValueFactory().createLiteral(value);
RepositoryConnection connection3 = repository.getConnection();
try {
Assert.assertFalse(connection3.hasStatement(subject,predicate,object3,true));
connection3.add(subject,predicate,object3);
connection3.commit();
Assert.assertTrue(connection3.hasStatement(subject,predicate,object3,true));
connection3.commit();
} finally {
connection3.close();
}
// phase 4: verify the re-added triple is visible to a fresh connection
Literal object4 = repository.getValueFactory().createLiteral(value);
RepositoryConnection connection4 = repository.getConnection();
try {
Assert.assertTrue(connection4.hasStatement(subject,predicate,object4,true));
connection4.commit();
} finally {
connection4.close();
}
}
/**
 * Test adding-deleting-adding a triple
 *
 * Unlike {@code testRepeatedAddRemove}, the remove and re-add here happen
 * inside a single transaction, including a remove/add cycle that is never
 * flushed between the operations.
 *
 * @throws Exception
 */
@Test
public void testRepeatedAddRemoveTransaction() throws Exception {
String value = RandomStringUtils.randomAlphanumeric(8);
URI subject = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
URI predicate = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
Literal object1 = repository.getValueFactory().createLiteral(value);
// seed the repository with the triple
RepositoryConnection connection1 = repository.getConnection();
try {
connection1.add(subject,predicate,object1);
connection1.commit();
Assert.assertTrue(connection1.hasStatement(subject,predicate,object1,true));
connection1.commit();
} finally {
connection1.close();
}
// remove and re-add within one transaction
Literal object2 = repository.getValueFactory().createLiteral(value);
Literal object3 = repository.getValueFactory().createLiteral(value);
RepositoryConnection connection2 = repository.getConnection();
try {
Assert.assertTrue(connection2.hasStatement(subject,predicate,object2,true));
connection2.remove(subject,predicate,object2);
Assert.assertFalse(connection2.hasStatement(subject,predicate,object2,true));
connection2.add(subject,predicate,object3);
Assert.assertTrue(connection2.hasStatement(subject,predicate,object3,true));
connection2.commit();
} finally {
connection2.close();
}
// the triple must still be visible after the mixed transaction committed
Literal object4 = repository.getValueFactory().createLiteral(value);
RepositoryConnection connection4 = repository.getConnection();
try {
Assert.assertTrue(connection4.hasStatement(subject,predicate,object4,true));
connection4.commit();
} finally {
connection4.close();
}
// test repeated adding/removing inside the same transaction
Literal object5 = repository.getValueFactory().createLiteral(RandomStringUtils.randomAlphanumeric(8));
RepositoryConnection connection5 = repository.getConnection();
try {
Assert.assertFalse(connection5.hasStatement(subject, predicate, object5, true));
connection5.add(subject,predicate,object5);
Assert.assertTrue(connection5.hasStatement(subject,predicate,object5,true));
connection5.remove(subject,predicate,object5);
Assert.assertFalse(connection5.hasStatement(subject,predicate,object5,true));
connection5.add(subject,predicate,object5);
Assert.assertTrue(connection5.hasStatement(subject,predicate,object5,true));
connection5.commit();
} finally {
connection5.close();
}
// a fresh connection must see the final add from the cycle above
RepositoryConnection connection6 = repository.getConnection();
try {
Assert.assertTrue(connection6.hasStatement(subject, predicate, object5, true));
connection6.commit();
} finally {
connection6.close();
}
}
/**
 * Remove and immediately re-add a triple that was committed by an earlier
 * transaction, all inside a single later transaction; a third connection
 * verifies the triple survived the remove/re-add cycle.
 */
@Test
public void testRepeatedAddRemoveCrossTransaction() throws RepositoryException {
String value = RandomStringUtils.randomAlphanumeric(8);
URI subject = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
URI predicate = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
Literal object1 = repository.getValueFactory().createLiteral(value);
// transaction 1: add and commit the triple
RepositoryConnection connection1 = repository.getConnection();
try {
connection1.add(subject,predicate,object1);
connection1.commit();
Assert.assertTrue(connection1.hasStatement(subject,predicate,object1,true));
connection1.commit();
} finally {
connection1.close();
}
// transaction 2: remove the committed triple, then re-add it before commit
RepositoryConnection connection2 = repository.getConnection();
try {
connection2.remove(subject, predicate, object1);
Assert.assertFalse(connection2.hasStatement(subject, predicate, object1, true));
connection2.add(subject,predicate,object1);
Assert.assertTrue(connection2.hasStatement(subject, predicate, object1, true));
connection2.commit();
} finally {
connection2.close();
}
// transaction 3: the triple must still exist
RepositoryConnection connection3 = repository.getConnection();
try {
Assert.assertTrue(connection3.hasStatement(subject, predicate, object1, true));
connection3.commit();
} finally {
connection3.close();
}
}
/**
 * Same remove/re-add cycle as {@code testRepeatedAddRemoveCrossTransaction},
 * but driven by a single SPARQL DELETE/INSERT/WHERE update instead of API
 * calls; the triple must still exist afterwards.
 */
@Test
public void testRepeatedAddRemoveSPARQL() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
String value = RandomStringUtils.randomAlphanumeric(8);
URI subject = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
URI predicate = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
Literal object1 = repository.getValueFactory().createLiteral(value);
// seed the repository with the triple
RepositoryConnection connection1 = repository.getConnection();
try {
connection1.add(subject,predicate,object1);
connection1.commit();
Assert.assertTrue(connection1.hasStatement(subject,predicate,object1,true));
connection1.commit();
} finally {
connection1.close();
}
// delete and re-insert the same binding in one SPARQL update
RepositoryConnection connection2 = repository.getConnection();
try {
String query = String.format("DELETE { <%s> <%s> ?v } INSERT { <%s> <%s> ?v . } WHERE { <%s> <%s> ?v }", subject.stringValue(), predicate.stringValue(), subject.stringValue(), predicate.stringValue(), subject.stringValue(), predicate.stringValue());
Update u = connection2.prepareUpdate(QueryLanguage.SPARQL, query);
u.execute();
connection2.commit();
} finally {
connection2.close();
}
// the triple must have survived the delete/insert round trip
RepositoryConnection connection3 = repository.getConnection();
try {
Assert.assertTrue(connection3.hasStatement(subject, predicate, object1, true));
connection3.commit();
} finally {
connection3.close();
}
}
/**
 * Test the rollback functionality of the triple store by adding a triple, rolling back, adding the triple again.
 *
 * @throws Exception
 */
@Test
public void testRollback() throws Exception {
    // Random names so repeated runs never collide with leftover data.
    String literalValue = RandomStringUtils.randomAlphanumeric(8);
    URI subj = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
    URI pred = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
    Literal obj = repository.getValueFactory().createLiteral(literalValue);

    // Phase 1: add the triple inside a transaction, then roll it back.
    RepositoryConnection rollbackCon = repository.getConnection();
    try {
        rollbackCon.begin();
        rollbackCon.add(subj, pred, obj);
        rollbackCon.rollback();
    } finally {
        rollbackCon.close();
    }

    // Phase 2: the rolled-back triple must be absent; adding and committing
    // it now must make it visible.
    RepositoryConnection commitCon = repository.getConnection();
    try {
        commitCon.begin();
        Assert.assertFalse(commitCon.hasStatement(subj, pred, obj, true));
        commitCon.add(subj, pred, obj);
        commitCon.commit();
        Assert.assertTrue(commitCon.hasStatement(subj, pred, obj, true));
        commitCon.commit();
    } finally {
        commitCon.close();
    }
}
/**
 * This test is for a strange bug that happens when running SPARQL updates that delete and reinsert a triple in
 * the same transaction. See https://issues.apache.org/jira/browse/MARMOTTA-283
 */
@Test
public void testMARMOTTA283() throws RepositoryException, RDFParseException, IOException, MalformedQueryException, UpdateExecutionException {
// load the demo dataset used as the update's WHERE source
InputStream rdfXML = this.getClass().getResourceAsStream("demo-data.foaf");
assumeThat("Could not load test-data: demo-data.foaf", rdfXML, notNullValue(InputStream.class));
RepositoryConnection connectionRDF = repository.getConnection();
try {
connectionRDF.add(rdfXML, "http://localhost/foaf/", RDFFormat.RDFXML);
connectionRDF.commit();
} finally {
connectionRDF.close();
}
// delete all triples of hans_meier and reinsert three of them in one update
String update = "DELETE { ?s ?p ?o } INSERT { <http://localhost:8080/LMF/resource/hans_meier> <http://xmlns.com/foaf/0.1/name> \"Hans Meier\" . <http://localhost:8080/LMF/resource/hans_meier> <http://xmlns.com/foaf/0.1/based_near> <http://dbpedia.org/resource/Traunstein> . <http://localhost:8080/LMF/resource/hans_meier> <http://xmlns.com/foaf/0.1/interest> <http://rdf.freebase.com/ns/en.linux> } WHERE { ?s ?p ?o . FILTER ( ?s = <http://localhost:8080/LMF/resource/hans_meier> ) }";
RepositoryConnection connectionUpdate = repository.getConnection();
try {
Update u = connectionUpdate.prepareUpdate(QueryLanguage.SPARQL, update);
u.execute();
connectionUpdate.commit();
} finally {
connectionUpdate.close();
}
// now there should be two triples
RepositoryConnection connectionVerify = repository.getConnection();
try {
URI hans_meier = repository.getValueFactory().createURI("http://localhost:8080/LMF/resource/hans_meier");
URI foaf_name = repository.getValueFactory().createURI("http://xmlns.com/foaf/0.1/name");
URI foaf_based_near = repository.getValueFactory().createURI("http://xmlns.com/foaf/0.1/based_near");
URI foaf_interest = repository.getValueFactory().createURI("http://xmlns.com/foaf/0.1/interest");
URI freebase_linux = repository.getValueFactory().createURI("http://rdf.freebase.com/ns/en.linux");
URI traunstein = repository.getValueFactory().createURI("http://dbpedia.org/resource/Traunstein");
// exactly the three reinserted statements must be present
Assert.assertTrue(connectionVerify.hasStatement(hans_meier,foaf_name,null, true));
Assert.assertTrue(connectionVerify.hasStatement(hans_meier,foaf_based_near,traunstein, true));
Assert.assertTrue(connectionVerify.hasStatement(hans_meier,foaf_interest,freebase_linux, true));
connectionVerify.commit();
} finally {
connectionVerify.close();
}
}
/**
 * This test is for a strange bug that happens when running SPARQL updates that delete and reinsert a triple in
 * the same transaction. It is similar to #testMARMOTTA283, but simulates the issue in more detail.
 * See https://issues.apache.org/jira/browse/MARMOTTA-283
 */
@Test
public void testMARMOTTA283_2() throws RepositoryException, RDFParseException, IOException, MalformedQueryException, UpdateExecutionException {
//insert quadruples into the named graph <http://resource.org/video>
String insert =
"WITH <http://resource.org/video>" +
"INSERT {" +
" <http://resource.org/video> <http://ontology.org#hasFragment> <http://resource.org/fragment1>." +
" <http://resource.org/annotation1> <http://ontology.org#hasTarget> <http://resource.org/fragment1>." +
" <http://resource.org/annotation1> <http://ontology.org#hasBody> <http://resource.org/subject1>." +
" <http://resource.org/fragment1> <http://ontology.org#shows> <http://resource.org/subject1>." +
"} WHERE {}";
RepositoryConnection connectionInsert = repository.getConnection();
try {
Update u = connectionInsert.prepareUpdate(QueryLanguage.SPARQL, insert);
u.execute();
connectionInsert.commit();
} finally {
connectionInsert.close();
}
//update quadruples: delete the annotation/fragment statements and reinsert the very same
//statements within the same transaction (the MARMOTTA-283 pattern, spelled out in detail)
String update =
"WITH <http://resource.org/video>" +
"DELETE { " +
" ?annotation ?p ?v." +
" ?fragment ?r ?s." +
" <http://resource.org/video> <http://ontology.org#hasFragment> ?fragment." +
"} INSERT {" +
" <http://resource.org/video> <http://ontology.org#hasFragment> <http://resource.org/fragment1>." +
" <http://resource.org/annotation1> <http://ontology.org#hasTarget> <http://resource.org/fragment1>." +
" <http://resource.org/annotation1> <http://ontology.org#hasBody> <http://resource.org/subject1>." +
" <http://resource.org/fragment1> <http://ontology.org#shows> <http://resource.org/subject1>." +
"} WHERE {" +
" ?annotation <http://ontology.org#hasTarget> ?fragment." +
" ?annotation ?p ?v." +
" OPTIONAL {" +
" ?fragment ?r ?s" +
" }" +
" FILTER (?fragment = <http://resource.org/fragment1>)" +
"} ";
RepositoryConnection connectionUpdate = repository.getConnection();
try {
Update u = connectionUpdate.prepareUpdate(QueryLanguage.SPARQL, update);
u.execute();
connectionUpdate.commit();
} finally {
connectionUpdate.close();
}
//check quadruples: all four statements must still be present in the named graph
RepositoryConnection connectionVerify = repository.getConnection();
try {
URI video = repository.getValueFactory().createURI("http://resource.org/video");
URI hasFragment = repository.getValueFactory().createURI("http://ontology.org#hasFragment");
URI fragment = repository.getValueFactory().createURI("http://resource.org/fragment1");
URI annotation = repository.getValueFactory().createURI("http://resource.org/annotation1");
URI hasTarget = repository.getValueFactory().createURI("http://ontology.org#hasTarget");
URI hasBody = repository.getValueFactory().createURI("http://ontology.org#hasBody");
URI subject = repository.getValueFactory().createURI("http://resource.org/subject1");
URI shows = repository.getValueFactory().createURI("http://ontology.org#shows");
Assert.assertTrue(connectionVerify.hasStatement(video,hasFragment,fragment,true,video));
Assert.assertTrue(connectionVerify.hasStatement(annotation,hasTarget,fragment,true,video));
Assert.assertTrue(connectionVerify.hasStatement(annotation,hasBody,subject,true,video));
Assert.assertTrue(connectionVerify.hasStatement(fragment,shows,subject,true,video));
connectionVerify.commit();
} finally {
connectionVerify.close();
}
}
/**
 * Test the concurrent connection problem reported in MARMOTTA-236 for facading:
 * - get two parallel connections
 * - add triple in connection 1; should be available in connection 1 and not in connection 2
 * - add same triple in connection 2; should be available in both, connection 1 and connection 2 or
 * fail-fast by throwing a ConcurrentModificationException
 * @throws Exception
 */
@Test
public void testMARMOTTA236() throws Exception {
RepositoryConnection con1 = repository.getConnection();
RepositoryConnection con2 = repository.getConnection();
try {
// random URIs so repeated runs cannot clash with leftover data
URI r1 = repository.getValueFactory().createURI("http://localhost/"+ RandomStringUtils.randomAlphanumeric(8));
URI r2 = repository.getValueFactory().createURI("http://localhost/"+ RandomStringUtils.randomAlphanumeric(8));
URI r3 = repository.getValueFactory().createURI("http://localhost/"+ RandomStringUtils.randomAlphanumeric(8));
con1.begin();
con1.add(r1,r2,r3);
// uncommitted write is visible in its own transaction ...
Assert.assertTrue(con1.hasStatement(r1,r2,r3,true));
con2.begin();
// ... but not in a parallel one
Assert.assertFalse(con2.hasStatement(r1,r2,r3,true));
con2.add(r1,r2,r3);
Assert.assertTrue(con2.hasStatement(r1,r2,r3,true));
con2.rollback();
con1.commit();
} catch (ConcurrentModificationException ex) {
// intentionally ignored: per the contract described above, failing fast with a
// ConcurrentModificationException is an acceptable outcome of the concurrent add
} finally {
con1.close();
con2.close();
}
}
/**
 * MARMOTTA-506 introduces a more efficient clearing of triples, which abandons some consistency guarantees. This
 * test aims to check for any side effect of this change.
 *
 * @throws Exception
 */
@Test
public void testFastClearDifferentTransactions() throws Exception {
String value = RandomStringUtils.randomAlphanumeric(8);
URI subject = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
URI predicate = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
Literal object1 = repository.getValueFactory().createLiteral(value);
// transaction 1: add the statement and check it is visible after commit
RepositoryConnection connection1 = repository.getConnection();
try {
connection1.add(subject,predicate,object1);
connection1.commit();
Assert.assertTrue(connection1.hasStatement(subject,predicate,object1,true));
// NOTE(review): second commit presumably ends the read transaction opened by
// hasStatement -- confirm against the connection's transaction semantics
connection1.commit();
} finally {
connection1.close();
}
// transaction 2: fast-clear the repository; the statement must be gone
RepositoryConnection connection2 = repository.getConnection();
try {
connection2.clear();
connection2.commit();
Assert.assertFalse(connection2.hasStatement(subject, predicate, object1, true));
connection2.commit();
} finally {
connection2.close();
}
// transaction 3: re-adding the same statement after the fast clear must work again
RepositoryConnection connection3 = repository.getConnection();
try {
connection3.add(subject,predicate,object1);
connection3.commit();
Assert.assertTrue(connection3.hasStatement(subject, predicate, object1, true));
connection3.commit();
} finally {
connection3.close();
}
}
/**
 * MARMOTTA-506 introduces a more efficient clearing of triples, which abandons some consistency guarantees. This
 * test aims to check for any side effect of this change.
 *
 * @throws Exception
 */
@Test
public void testFastClearSameTransaction() throws Exception {
String value = RandomStringUtils.randomAlphanumeric(8);
URI subject = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
URI predicate = repository.getValueFactory().createURI("http://localhost/resource/" + RandomStringUtils.randomAlphanumeric(8));
Literal object1 = repository.getValueFactory().createLiteral(value);
// add, clear and re-add all inside a single transaction; each step must be
// immediately visible to the same connection before the final commit
RepositoryConnection connection1 = repository.getConnection();
try {
connection1.add(subject,predicate,object1);
Assert.assertTrue(connection1.hasStatement(subject,predicate,object1,true));
connection1.clear();
Assert.assertFalse(connection1.hasStatement(subject, predicate, object1, true));
connection1.add(subject,predicate,object1);
Assert.assertTrue(connection1.hasStatement(subject, predicate, object1, true));
connection1.commit();
} finally {
connection1.close();
}
}
}
|
oracle/graalpython | 34,877 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/compiler/Unparser.java | /*
* Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.graal.python.compiler;
import static com.oracle.graal.python.util.PythonUtils.TS_ENCODING;
import static com.oracle.graal.python.util.PythonUtils.codePointsToTruffleString;
import static com.oracle.graal.python.util.PythonUtils.tsLiteral;
import com.oracle.graal.python.builtins.PythonBuiltinClassType;
import com.oracle.graal.python.builtins.objects.bytes.BytesUtils;
import com.oracle.graal.python.builtins.objects.floats.FloatBuiltins;
import com.oracle.graal.python.builtins.objects.str.StringNodes;
import com.oracle.graal.python.nodes.ErrorMessages;
import com.oracle.graal.python.nodes.PRaiseNode;
import com.oracle.graal.python.pegparser.sst.AliasTy;
import com.oracle.graal.python.pegparser.sst.ArgTy;
import com.oracle.graal.python.pegparser.sst.ArgumentsTy;
import com.oracle.graal.python.pegparser.sst.BoolOpTy;
import com.oracle.graal.python.pegparser.sst.ComprehensionTy;
import com.oracle.graal.python.pegparser.sst.ConstantValue;
import com.oracle.graal.python.pegparser.sst.ConstantValue.Kind;
import com.oracle.graal.python.pegparser.sst.ExceptHandlerTy;
import com.oracle.graal.python.pegparser.sst.ExprTy;
import com.oracle.graal.python.pegparser.sst.KeywordTy;
import com.oracle.graal.python.pegparser.sst.MatchCaseTy;
import com.oracle.graal.python.pegparser.sst.ModTy;
import com.oracle.graal.python.pegparser.sst.PatternTy;
import com.oracle.graal.python.pegparser.sst.SSTNode;
import com.oracle.graal.python.pegparser.sst.SSTreeVisitor;
import com.oracle.graal.python.pegparser.sst.StmtTy;
import com.oracle.graal.python.pegparser.sst.StmtTy.TypeAlias;
import com.oracle.graal.python.pegparser.sst.TypeIgnoreTy;
import com.oracle.graal.python.pegparser.sst.TypeParamTy.ParamSpec;
import com.oracle.graal.python.pegparser.sst.TypeParamTy.TypeVar;
import com.oracle.graal.python.pegparser.sst.TypeParamTy.TypeVarTuple;
import com.oracle.graal.python.pegparser.sst.WithItemTy;
import com.oracle.graal.python.runtime.formatting.ComplexFormatter;
import com.oracle.graal.python.runtime.formatting.FloatFormatter;
import com.oracle.graal.python.runtime.formatting.InternalFormat.Spec;
import com.oracle.truffle.api.strings.TruffleString;
import com.oracle.truffle.api.strings.TruffleStringBuilder;
public class Unparser implements SSTreeVisitor<Void> {
/**
 * Unparses the given AST node back into Python source text at the default
 * precedence level ({@code PR_TEST}).
 */
public static TruffleString unparse(SSTNode node) {
return unparse(node, PR_TEST);
}
/**
 * Unparses {@code node} at the given precedence level; the level controls whether
 * the visitors wrap their output in parentheses.
 */
private static TruffleString unparse(SSTNode node, int level) {
TruffleStringBuilder builder = TruffleStringBuilder.create(TS_ENCODING);
node.accept(new Unparser(builder, level));
return builder.toStringUncached();
}
private Unparser(TruffleStringBuilder builder, int level) {
this.builder = builder;
this.level = level;
}
// Output buffer; temporarily swapped out in buildFStringBody to render f-string parts separately.
private TruffleStringBuilder builder;
// Precedence level of the surrounding context; visitors emit parentheses when their
// operator binds more loosely than this level.
private int level;
// Precedence levels, from loosest to tightest binding.
private static final int PR_TUPLE = 0;
private static final int PR_TEST = 1; /* 'if'-'else', 'lambda' */
private static final int PR_OR = 2; /* 'or' */
private static final int PR_AND = 3; /* 'and' */
private static final int PR_NOT = 4; /* 'not' */
private static final int PR_CMP = 5; /*
* '<', '>', '==', '>=', '<=', '!=', 'in', 'not in', 'is',
* 'is not'
*/
private static final int PR_EXPR = 6;
private static final int PR_BOR = PR_EXPR; /* '|' */
private static final int PR_BXOR = 7; /* '^' */
private static final int PR_BAND = 8; /* '&' */
private static final int PR_SHIFT = 9; /* '<<', '>>' */
private static final int PR_ARITH = 10; /* '+', '-' */
private static final int PR_TERM = 11; /* '*', '@', '/', '%', '//' */
private static final int PR_FACTOR = 12; /* unary '+', '-', '~' */
private static final int PR_POWER = 13; /* '**' */
private static final int PR_AWAIT = 14; /* 'await' */
private static final int PR_ATOM = 15;
/** Appends the given string to the output buffer. */
private void appendStr(TruffleString temp_fv_str) {
builder.appendStringUncached(temp_fv_str);
}
/** Convenience overload converting a Java string before appending. */
private void appendStr(String s) {
appendStr(TruffleString.fromJavaStringUncached(s, TS_ENCODING));
}
/** Appends {@code s} only when {@code cond} holds (used for optional parentheses). */
private void appendStrIf(boolean cond, String s) {
if (cond) {
appendStr(s);
}
}
/** Visits {@code node} with {@code level} temporarily set to {@code newLevel}. */
private void appendExpr(SSTNode node, int newLevel) {
int savedLevel = this.level;
this.level = newLevel;
node.accept(this);
this.level = savedLevel;
}
/**
 * Null-safe length helper: treats a {@code null} array as empty.
 */
int len(SSTNode[] a) {
    if (a == null) {
        return 0;
    }
    return a.length;
}
private static final TruffleString OPEN_BR = tsLiteral("{");
private static final TruffleString DBL_OPEN_BR = tsLiteral("{{");
private static final TruffleString CLOSE_BR = tsLiteral("}");
private static final TruffleString DBL_CLOSE_BR = tsLiteral("}}");
/** Doubles every brace so the text can appear literally inside an f-string body. */
private static TruffleString escapeBraces(TruffleString s) {
StringNodes.StringReplaceNode replace = StringNodes.StringReplaceNode.getUncached();
TruffleString t = replace.execute(s, OPEN_BR, DBL_OPEN_BR, -1);
return replace.execute(t, CLOSE_BR, DBL_CLOSE_BR, -1);
}
/** Appends literal f-string text with braces escaped. */
private void appendFString(TruffleString string) {
appendStr(escapeBraces(string));
}
/**
 * Appends a single element of an f-string: a constant text chunk, a nested joined
 * string, or a formatted value (a {@code {...}} replacement field).
 */
private void appendFStringElement(ExprTy e, boolean isFormatSpec) {
if (e instanceof ExprTy.Constant c) {
appendFString(codePointsToTruffleString(c.value.getCodePoints()));
} else if (e instanceof ExprTy.JoinedStr) {
appendJoinedStr((ExprTy.JoinedStr) e, isFormatSpec);
} else if (e instanceof ExprTy.FormattedValue) {
visit((ExprTy.FormattedValue) e);
} else {
throw new IllegalStateException("unknown expression kind inside f-string");
}
}
/**
 * Renders the given f-string elements into a fresh buffer and returns the result.
 * The instance's output builder is temporarily swapped out so the elements do not
 * end up in the main output, and is restored before returning.
 */
public TruffleString buildFStringBody(ExprTy[] values, boolean isFormatSpec) {
    TruffleStringBuilder outerBuilder = builder;
    builder = TruffleStringBuilder.create(TS_ENCODING);
    for (ExprTy value : values) {
        appendFStringElement(value, isFormatSpec);
    }
    TruffleString body = builder.toStringUncached();
    builder = outerBuilder;
    return body;
}
/**
 * Appends a joined (f-)string. A top-level f-string is emitted as an {@code f"..."}
 * literal with its body repr-quoted; a nested format spec is emitted verbatim.
 */
private void appendJoinedStr(ExprTy.JoinedStr node, boolean isFormatSpec) {
TruffleString body = buildFStringBody(node.values, isFormatSpec);
if (!isFormatSpec) {
appendStr("f");
appendStr(StringNodes.StringReprNode.getUncached().execute(body));
} else {
appendStr(body);
}
}
@Override
public Void visit(AliasTy node) {
// import aliases only occur in statements, which this expression unparser rejects
throw new IllegalStateException("unknown expression kind");
}
/** Unparses a single parameter, optionally with its {@code ": annotation"}. */
@Override
public Void visit(ArgTy node) {
appendStr(node.arg);
if (node.annotation != null) {
appendStr(": ");
appendExpr(node.annotation, PR_TEST);
}
return null;
}
/**
 * Unparses a full parameter list: positional-only and positional parameters (with
 * defaults and the trailing "/" marker), "*args" or a bare "*" separator,
 * keyword-only parameters (with defaults), and "**kwargs".
 */
@Override
public Void visit(ArgumentsTy args) {
boolean first = true;
int argCount, posonlyargCount, defaultCount;
/* positional-only and positional arguments with defaults */
posonlyargCount = len(args.posOnlyArgs);
argCount = len(args.args);
defaultCount = len(args.defaults);
for (int i = 0; i < posonlyargCount + argCount; i++) {
if (first) {
first = false;
} else {
appendStr(", ");
}
if (i < posonlyargCount) {
visit(args.posOnlyArgs[i]);
} else {
visit(args.args[i - posonlyargCount]);
}
// defaults align with the trailing parameters; di becomes >= 0 once we reach them
int di = i - posonlyargCount - argCount + defaultCount;
if (di >= 0) {
appendStr("=");
appendExpr(args.defaults[di], PR_TEST);
}
// "/" marks the end of the positional-only section
if (posonlyargCount > 0 && i + 1 == posonlyargCount) {
appendStr(", /");
}
}
/* vararg, or bare '*' if no varargs but keyword-only arguments present */
if (args.varArg != null || len(args.kwOnlyArgs) > 0) {
if (first) {
first = false;
} else {
appendStr(", ");
}
appendStr("*");
if (args.varArg != null) {
visit(args.varArg);
}
}
/* keyword-only arguments */
argCount = len(args.kwOnlyArgs);
defaultCount = len(args.kwDefaults);
for (int i = 0; i < argCount; i++) {
if (first) {
first = false;
} else {
appendStr(", ");
}
visit(args.kwOnlyArgs[i]);
int di = i - argCount + defaultCount;
if (di >= 0) {
// keyword-only defaults may be null (no default for that parameter)
ExprTy kwDefault = args.kwDefaults[di];
if (kwDefault != null) {
appendStr("=");
appendExpr(kwDefault, PR_TEST);
}
}
}
/* **kwargs */
if (args.kwArg != null) {
if (!first) {
appendStr(", ");
}
appendStr("**");
visit(args.kwArg);
}
return null;
}
/** Unparses one generator clause: {@code " [async] for <target> in <iter>"} plus any {@code " if"} filters. */
@Override
public Void visit(ComprehensionTy node) {
appendStr(node.isAsync ? " async for " : " for ");
appendExpr(node.target, PR_TUPLE);
appendStr(" in ");
appendExpr(node.iter, PR_TEST + 1);
int ifCount = len(node.ifs);
for (int i = 0; i < ifCount; i++) {
appendStr(" if ");
appendExpr(node.ifs[i], PR_TEST + 1);
}
return null;
}
/**
 * Appends each generator clause of a comprehension in order.
 */
private void appendComprehensions(ComprehensionTy[] comprehensions) {
    for (ComprehensionTy comprehension : comprehensions) {
        visit(comprehension);
    }
}
/** Unparses attribute access {@code value.attr}. */
@Override
public Void visit(ExprTy.Attribute node) {
String period;
ExprTy v = node.value;
appendExpr(v, PR_ATOM);
/*
 * Special case: integers require a space for attribute access to be unambiguous.
 */
if (v instanceof ExprTy.Constant && (((ExprTy.Constant) v).value.kind == ConstantValue.Kind.LONG || ((ExprTy.Constant) v).value.kind == ConstantValue.Kind.BIGINTEGER)) {
period = " .";
} else {
period = ".";
}
appendStr(period);
appendStr(node.attr);
return null;
}
/** Unparses {@code await value}, parenthesized when the context binds tighter. */
@Override
public Void visit(ExprTy.Await node) {
appendStrIf(level > PR_AWAIT, "(");
appendStr("await ");
appendExpr(node.value, PR_ATOM);
appendStrIf(level > PR_AWAIT, ")");
return null;
}
/**
 * Unparses a binary operation {@code left <op> right}, adding parentheses when the
 * operator binds more loosely than the surrounding context.
 */
@Override
public Void visit(ExprTy.BinOp node) {
    final String op;
    final int pr;
    boolean rightAssociative = false;
    switch (node.op) {
        case Add -> { op = " + "; pr = PR_ARITH; }
        case Sub -> { op = " - "; pr = PR_ARITH; }
        case Mult -> { op = " * "; pr = PR_TERM; }
        case MatMult -> { op = " @ "; pr = PR_TERM; }
        case Div -> { op = " / "; pr = PR_TERM; }
        case Mod -> { op = " % "; pr = PR_TERM; }
        case LShift -> { op = " << "; pr = PR_SHIFT; }
        case RShift -> { op = " >> "; pr = PR_SHIFT; }
        case BitOr -> { op = " | "; pr = PR_BOR; }
        case BitXor -> { op = " ^ "; pr = PR_BXOR; }
        case BitAnd -> { op = " & "; pr = PR_BAND; }
        case FloorDiv -> { op = " // "; pr = PR_TERM; }
        case Pow -> {
            // '**' is the only right-associative binary operator
            op = " ** ";
            pr = PR_POWER;
            rightAssociative = true;
        }
        default -> throw new IllegalStateException("unknown binary operator");
    }
    appendStrIf(level > pr, "(");
    // the non-associative side is printed one level tighter so that equal-precedence
    // operands on that side get parenthesized
    appendExpr(node.left, pr + (rightAssociative ? 1 : 0));
    appendStr(op);
    appendExpr(node.right, pr + (rightAssociative ? 0 : 1));
    appendStrIf(level > pr, ")");
    return null;
}
/** Unparses {@code a and b [and ...]} / {@code a or b [or ...]}, parenthesized when needed. */
@Override
public Void visit(ExprTy.BoolOp node) {
String op = node.op == BoolOpTy.And ? " and " : " or ";
int pr = node.op == BoolOpTy.And ? PR_AND : PR_OR;
appendStrIf(level > pr, "(");
for (int i = 0; i < node.values.length; i++) {
appendStrIf(i > 0, op);
// operands one level tighter: nested bool-ops of the same precedence get parens
appendExpr(node.values[i], pr + 1);
}
appendStrIf(level > pr, ")");
return null;
}
/**
 * Unparses a call {@code func(args..., keywords...)}. A call whose sole argument is
 * a generator expression reuses the generator's own parentheses.
 */
@Override
public Void visit(ExprTy.Call node) {
appendExpr(node.func, PR_ATOM);
int argCount = len(node.args);
int kwCount = len(node.keywords);
if (argCount == 1 && kwCount == 0) {
ExprTy expr = node.args[0];
if (expr instanceof ExprTy.GeneratorExp) {
/* Special case: a single generator expression. */
return visit((ExprTy.GeneratorExp) expr);
}
}
appendStr("(");
boolean first = true;
for (int i = 0; i < argCount; i++) {
if (first) {
first = false;
} else {
appendStr(", ");
}
appendExpr(node.args[i], PR_TEST);
}
for (int i = 0; i < kwCount; i++) {
if (first) {
first = false;
} else {
appendStr(", ");
}
visit(node.keywords[i]);
}
appendStr(")");
return null;
}
/**
 * Unparses a (possibly chained) comparison, e.g. {@code a < b <= c}; all comparison
 * operators share the PR_CMP precedence level.
 */
@Override
public Void visit(ExprTy.Compare node) {
    appendStrIf(level > PR_CMP, "(");
    int comparatorCount = len(node.comparators);
    assert comparatorCount > 0;
    assert comparatorCount == node.ops.length;
    appendExpr(node.left, PR_CMP + 1);
    for (int i = 0; i < comparatorCount; i++) {
        String op = switch (node.ops[i]) {
            case Eq -> " == ";
            case NotEq -> " != ";
            case Lt -> " < ";
            case LtE -> " <= ";
            case Gt -> " > ";
            case GtE -> " >= ";
            case Is -> " is ";
            case IsNot -> " is not ";
            case In -> " in ";
            case NotIn -> " not in ";
            default -> throw new IllegalStateException("unexpected comparison kind");
        };
        appendStr(op);
        appendExpr(node.comparators[i], PR_CMP + 1);
    }
    appendStrIf(level > PR_CMP, ")");
    return null;
}
/**
 * Unparses a constant: an optional string-prefix "kind" (e.g. 'u') followed by the
 * value itself; an Ellipsis constant is rendered as "...".
 */
@Override
public Void visit(ExprTy.Constant node) {
// equivalent of case Constant_kind in append_ast_expr
if (node.value.kind == Kind.ELLIPSIS) {
appendStr("...");
return null;
}
if (node.kind instanceof String) {
appendStr((String) node.kind);
} else if (node.kind instanceof byte[]) {
// This conversion of byte[] -> String might not be correct, but CPython crashes in this
// case so nobody cares and also "kind" should only be 'u' or b'u' if present.
appendStr(new String((byte[]) node.kind));
}
appendConstantValue(node.value);
return null;
}
/**
 * Renders a constant value as Python source: numbers, booleans, strings (repr),
 * bytes (repr), None/Ellipsis, and recursively tuples and frozensets.
 */
private void appendConstantValue(ConstantValue value) {
switch (value.kind) {
case LONG:
builder.appendLongNumberUncached(value.getLong());
break;
case DOUBLE:
// minFracDigits=1 keeps a trailing ".0" on whole numbers
FloatFormatter f = new FloatFormatter(FloatBuiltins.StrNode.spec, null);
f.setMinFracDigits(1);
TruffleString result = f.format(value.getDouble()).getResult();
appendStr(result);
break;
case BOOLEAN:
appendStr(value.getBoolean() ? "True" : "False");
break;
case CODEPOINTS:
appendStr(StringNodes.StringReprNode.getUncached().execute(codePointsToTruffleString(value.getCodePoints())));
break;
case BIGINTEGER:
appendStr(value.getBigInteger().toString());
break;
case NONE:
appendStr("None");
break;
case BYTES:
byte[] bytes = value.getBytes();
BytesUtils.reprLoop(builder, bytes, bytes.length, TruffleStringBuilder.AppendCodePointNode.getUncached());
break;
case COMPLEX:
double[] num = value.getComplex();
ComplexFormatter formatter = new ComplexFormatter(new Spec(-1, Spec.NONE), null);
formatter.format(num[0], num[1]);
appendStr(formatter.pad().getResult());
break;
case ELLIPSIS:
appendStr("Ellipsis");
break;
case TUPLE:
appendTuple(value.getTupleElements());
break;
case FROZENSET:
appendFrozenset(value.getFrozensetElements());
break;
default:
throw new IllegalStateException("unknown constant kind");
}
}
/**
 * Appends a tuple display, e.g. {@code (1, 2)}; a one-element tuple keeps its
 * trailing comma.
 */
private void appendTuple(ConstantValue[] values) {
    appendStr("(");
    boolean first = true;
    for (ConstantValue element : values) {
        if (!first) {
            appendStr(", ");
        }
        first = false;
        appendConstantValue(element);
    }
    if (values.length == 1) {
        appendStr(",");
    }
    appendStr(")");
}
/**
 * Appends a frozenset constant as {@code frozenset({...})}, or bare
 * {@code frozenset()} when empty.
 */
private void appendFrozenset(ConstantValue[] values) {
    appendStr("frozenset(");
    if (values.length != 0) {
        appendStr("{");
        boolean first = true;
        for (ConstantValue element : values) {
            if (!first) {
                appendStr(", ");
            }
            first = false;
            appendConstantValue(element);
        }
        appendStr("}");
    }
    appendStr(")");
}
/**
 * Unparses a dict display {@code {k: v, ...}}; a null key marks a {@code **mapping}
 * unpacking entry.
 */
@Override
public Void visit(ExprTy.Dict node) {
appendStr("{");
for (int i = 0; i < len(node.values); i++) {
appendStrIf(i > 0, ", ");
ExprTy key_node = node.keys[i];
if (key_node != null) {
appendExpr(key_node, PR_TEST);
appendStr(": ");
appendExpr(node.values[i], PR_TEST);
} else {
appendStr("**");
appendExpr(node.values[i], PR_EXPR);
}
}
appendStr("}");
return null;
}
/** Unparses a dict comprehension {@code {k: v for ...}}. */
@Override
public Void visit(ExprTy.DictComp node) {
appendStr("{");
appendExpr(node.key, PR_TEST);
appendStr(": ");
appendExpr(node.value, PR_TEST);
appendComprehensions(node.generators);
appendStr("}");
return null;
}
/**
 * Unparses an f-string replacement field: {@code {expr[!conversion][:format_spec]}}.
 */
@Override
public Void visit(ExprTy.FormattedValue node) {
String conversion;
String outerBrace = "{";
/*
 * Grammar allows PR_TUPLE, but use >PR_TEST for adding parenthesis around a lambda with ':'
 */
TruffleString tempFvStr = unparse(node.value, PR_TEST + 1);
if (tempFvStr.codePointAtByteIndexUncached(0, TS_ENCODING) == '{') {
/*
 * Expression starts with a brace, split it with a space from the outer one.
 */
outerBrace = "{ ";
}
appendStr(outerBrace);
appendStr(tempFvStr);
// conversion is the raw character code ('a'/'r'/'s'), or <= 0 when absent
if (node.conversion > 0) {
switch (node.conversion) {
case 'a':
conversion = "!a";
break;
case 'r':
conversion = "!r";
break;
case 's':
conversion = "!s";
break;
default:
throw PRaiseNode.raiseStatic(null, PythonBuiltinClassType.SystemError, ErrorMessages.UNKNOWN_F_VALUE_CONVERSION_KIND);
}
appendStr(conversion);
}
if (node.formatSpec != null) {
appendStr(":");
appendFStringElement(node.formatSpec, true);
}
appendStr("}");
return null;
}
/** Unparses a generator expression {@code (elt for ...)}. */
@Override
public Void visit(ExprTy.GeneratorExp node) {
appendStr("(");
appendExpr(node.element, PR_TEST);
appendComprehensions(node.generators);
appendStr(")");
return null;
}
/** Unparses a conditional expression {@code body if test else orElse}. */
@Override
public Void visit(ExprTy.IfExp node) {
appendStrIf(level > PR_TEST, "(");
appendExpr(node.body, PR_TEST + 1);
appendStr(" if ");
appendExpr(node.test, PR_TEST + 1);
appendStr(" else ");
appendExpr(node.orElse, PR_TEST);
appendStrIf(level > PR_TEST, ")");
return null;
}
/** Unparses a top-level f-string literal. */
@Override
public Void visit(ExprTy.JoinedStr node) {
appendJoinedStr(node, false);
return null;
}
/** Unparses {@code lambda [params]: body}; the space after "lambda" is omitted when there are no positional parameters. */
@Override
public Void visit(ExprTy.Lambda node) {
appendStrIf(level > PR_TEST, "(");
int nPositional = len(node.args.args) + len(node.args.posOnlyArgs);
appendStr(nPositional > 0 ? "lambda " : "lambda");
visit(node.args);
appendStr(": ");
appendExpr(node.body, PR_TEST);
appendStrIf(level > PR_TEST, ")");
return null;
}
/** Unparses a list display {@code [a, b, ...]}. */
@Override
public Void visit(ExprTy.List node) {
appendStr("[");
for (int i = 0; i < len(node.elements); i++) {
appendStrIf(i > 0, ", ");
appendExpr(node.elements[i], PR_TEST);
}
appendStr("]");
return null;
}
/**
 * Unparses a list comprehension, e.g. {@code [x for x in y]}.
 *
 * Bug fix: the previous version emitted parentheses, which renders a list
 * comprehension as a generator expression. List displays use square brackets,
 * consistent with {@link #visit(ExprTy.List)} and CPython's AST unparser.
 */
@Override
public Void visit(ExprTy.ListComp node) {
    appendStr("[");
    appendExpr(node.element, PR_TEST);
    appendComprehensions(node.generators);
    appendStr("]");
    return null;
}
/** Unparses a bare identifier. */
@Override
public Void visit(ExprTy.Name node) {
appendStr(node.id);
return null;
}
/** Unparses a walrus assignment {@code (target := value)}. */
@Override
public Void visit(ExprTy.NamedExpr node) {
appendStrIf(level > PR_TUPLE, "(");
appendExpr(node.target, PR_ATOM);
appendStr(" := ");
appendExpr(node.value, PR_ATOM);
appendStrIf(level > PR_TUPLE, ")");
return null;
}
/** Unparses a set display {@code {a, b, ...}}. */
@Override
public Void visit(ExprTy.Set node) {
appendStr("{");
for (int i = 0; i < len(node.elements); i++) {
appendStrIf(i > 0, ", ");
appendExpr(node.elements[i], PR_TEST);
}
appendStr("}");
return null;
}
/**
 * Unparses a set comprehension, e.g. {@code {x for x in y}}.
 *
 * Bug fix: the previous version emitted parentheses, which renders a set
 * comprehension as a generator expression. Set displays use curly braces,
 * consistent with {@link #visit(ExprTy.Set)} and CPython's AST unparser.
 */
@Override
public Void visit(ExprTy.SetComp node) {
    appendStr("{");
    appendExpr(node.element, PR_TEST);
    appendComprehensions(node.generators);
    appendStr("}");
    return null;
}
/** Unparses a slice {@code [lower]:[upper][:step]}; absent parts are simply omitted. */
@Override
public Void visit(ExprTy.Slice node) {
if (node.lower != null) {
appendExpr(node.lower, PR_TEST);
}
appendStr(":");
if (node.upper != null) {
appendExpr(node.upper, PR_TEST);
}
if (node.step != null) {
appendStr(":");
appendExpr(node.step, PR_TEST);
}
return null;
}
/** Unparses iterable unpacking {@code *value}. */
@Override
public Void visit(ExprTy.Starred node) {
appendStr("*");
appendExpr(node.value, PR_EXPR);
return null;
}
/** Unparses a subscript {@code value[slice]}. */
@Override
public Void visit(ExprTy.Subscript node) {
appendExpr(node.value, PR_ATOM);
appendStr("[");
appendExpr(node.slice, PR_TUPLE);
appendStr("]");
return null;
}
/**
 * Unparses a tuple; the empty tuple is "()", a one-element tuple keeps its trailing
 * comma, and parentheses are added only when the context requires them.
 */
@Override
public Void visit(ExprTy.Tuple node) {
int elemCount = len(node.elements);
if (elemCount == 0) {
appendStr("()");
return null;
}
appendStrIf(level > PR_TUPLE, "(");
for (int i = 0; i < elemCount; i++) {
appendStrIf(i > 0, ", ");
appendExpr(node.elements[i], PR_TEST);
}
appendStrIf(elemCount == 1, ",");
appendStrIf(level > PR_TUPLE, ")");
return null;
}
/**
 * Unparses a unary operation ({@code ~}, {@code not}, unary {@code +}/{@code -}),
 * parenthesized when the context binds tighter than the operator.
 */
@Override
public Void visit(ExprTy.UnaryOp node) {
    final String op;
    final int pr;
    switch (node.op) {
        case Invert -> { op = "~"; pr = PR_FACTOR; }
        case Not -> { op = "not "; pr = PR_NOT; }
        case UAdd -> { op = "+"; pr = PR_FACTOR; }
        case USub -> { op = "-"; pr = PR_FACTOR; }
        default -> throw new IllegalStateException("unknown unary operator");
    }
    appendStrIf(level > pr, "(");
    appendStr(op);
    // the operand is printed at the operator's own level: stacked unary operators
    // nest without extra parentheses
    appendExpr(node.operand, pr);
    appendStrIf(level > pr, ")");
    return null;
}
/** Unparses {@code (yield)} or {@code (yield value)}; yields are always parenthesized. */
@Override
public Void visit(ExprTy.Yield node) {
if (node.value == null) {
appendStr("(yield)");
return null;
}
appendStr("(yield ");
appendExpr(node.value, PR_TEST);
appendStr(")");
return null;
}
/** Unparses {@code (yield from value)}. */
@Override
public Void visit(ExprTy.YieldFrom node) {
appendStr("(yield from ");
appendExpr(node.value, PR_TEST);
appendStr(")");
return null;
}
/** Unparses a call keyword: {@code name=value}, or {@code **value} when name is absent. */
@Override
public Void visit(KeywordTy node) {
if (node.arg == null) {
appendStr("**");
} else {
appendStr(node.arg);
appendStr("=");
}
appendExpr(node.value, PR_TEST);
return null;
}
// The visitors below cover module, statement, pattern and type-ignore nodes. This
// unparser only handles expression fragments, so all of them fail fast.
@Override
public Void visit(ModTy.Expression node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(ModTy.FunctionType node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(ModTy.Interactive node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(ModTy.Module node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(TypeIgnoreTy.TypeIgnore node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.AnnAssign node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Assert node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Assign node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.AsyncFor node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.AsyncFunctionDef node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.AsyncWith node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.AugAssign node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.ClassDef node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Delete node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Expr node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.For node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.FunctionDef node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Global node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.If node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Import node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.ImportFrom node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Match node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(MatchCaseTy node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchAs node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchClass node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchMapping node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchOr node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchSequence node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchSingleton node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchStar node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(PatternTy.MatchValue node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Nonlocal node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Raise node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Return node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Try node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.TryStar node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(ExceptHandlerTy.ExceptHandler node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.While node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.With node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(WithItemTy node) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Break aThis) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Continue aThis) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(StmtTy.Pass aThis) {
throw new IllegalStateException("unknown expression kind");
}
@Override
public Void visit(TypeAlias node) {
throw new IllegalStateException("unknown AST node");
}
@Override
public Void visit(TypeVar node) {
throw new IllegalStateException("unknown AST node");
}
@Override
public Void visit(ParamSpec node) {
throw new IllegalStateException("unknown AST node");
}
@Override
public Void visit(TypeVarTuple node) {
throw new IllegalStateException("unknown AST node");
}
}
|
googleapis/google-cloud-java | 35,260 | java-video-live-stream/proto-google-cloud-live-stream-v1/src/main/java/com/google/cloud/video/livestream/v1/DeleteChannelRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/video/livestream/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.video.livestream.v1;
/**
*
*
* <pre>
* Request message for "LivestreamService.DeleteChannel".
* </pre>
*
* Protobuf type {@code google.cloud.video.livestream.v1.DeleteChannelRequest}
*/
public final class DeleteChannelRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.livestream.v1.DeleteChannelRequest)
DeleteChannelRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteChannelRequest.newBuilder() to construct.
private DeleteChannelRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteChannelRequest() {
name_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteChannelRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.livestream.v1.ServiceProto
.internal_static_google_cloud_video_livestream_v1_DeleteChannelRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.livestream.v1.ServiceProto
.internal_static_google_cloud_video_livestream_v1_DeleteChannelRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.livestream.v1.DeleteChannelRequest.class,
com.google.cloud.video.livestream.v1.DeleteChannelRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FORCE_FIELD_NUMBER = 3;
private boolean force_ = false;
/**
*
*
* <pre>
* If the `force` field is set to the default value of `false`, you must
* delete all of a channel's events before you can delete the channel itself.
* If the field is set to `true`, requests to delete a channel also delete
* associated channel events.
* </pre>
*
* <code>bool force = 3;</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
}
if (force_ != false) {
output.writeBool(3, force_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
}
if (force_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.video.livestream.v1.DeleteChannelRequest)) {
return super.equals(obj);
}
com.google.cloud.video.livestream.v1.DeleteChannelRequest other =
(com.google.cloud.video.livestream.v1.DeleteChannelRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getRequestId().equals(other.getRequestId())) return false;
if (getForce() != other.getForce()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (37 * hash) + FORCE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.video.livestream.v1.DeleteChannelRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for "LivestreamService.DeleteChannel".
* </pre>
*
* Protobuf type {@code google.cloud.video.livestream.v1.DeleteChannelRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.video.livestream.v1.DeleteChannelRequest)
com.google.cloud.video.livestream.v1.DeleteChannelRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.livestream.v1.ServiceProto
.internal_static_google_cloud_video_livestream_v1_DeleteChannelRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.livestream.v1.ServiceProto
.internal_static_google_cloud_video_livestream_v1_DeleteChannelRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.livestream.v1.DeleteChannelRequest.class,
com.google.cloud.video.livestream.v1.DeleteChannelRequest.Builder.class);
}
// Construct using com.google.cloud.video.livestream.v1.DeleteChannelRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
requestId_ = "";
force_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.video.livestream.v1.ServiceProto
.internal_static_google_cloud_video_livestream_v1_DeleteChannelRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.DeleteChannelRequest getDefaultInstanceForType() {
return com.google.cloud.video.livestream.v1.DeleteChannelRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.DeleteChannelRequest build() {
com.google.cloud.video.livestream.v1.DeleteChannelRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.video.livestream.v1.DeleteChannelRequest buildPartial() {
com.google.cloud.video.livestream.v1.DeleteChannelRequest result =
new com.google.cloud.video.livestream.v1.DeleteChannelRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.video.livestream.v1.DeleteChannelRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.requestId_ = requestId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.force_ = force_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.video.livestream.v1.DeleteChannelRequest) {
return mergeFrom((com.google.cloud.video.livestream.v1.DeleteChannelRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.video.livestream.v1.DeleteChannelRequest other) {
if (other == com.google.cloud.video.livestream.v1.DeleteChannelRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getForce() != false) {
setForce(other.getForce());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
force_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the channel resource, in the form of:
* `projects/{project}/locations/{location}/channels/{channelId}`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and the
* request times out. If you make the request again with the same request ID,
* the server can check if original operation with the same request ID was
* received, and if so, will ignore the second request. This prevents clients
* from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported `(00000000-0000-0000-0000-000000000000)`.
* </pre>
*
* <code>string request_id = 2;</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private boolean force_;
/**
*
*
* <pre>
* If the `force` field is set to the default value of `false`, you must
* delete all of a channel's events before you can delete the channel itself.
* If the field is set to `true`, requests to delete a channel also delete
* associated channel events.
* </pre>
*
* <code>bool force = 3;</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
/**
*
*
* <pre>
* If the `force` field is set to the default value of `false`, you must
* delete all of a channel's events before you can delete the channel itself.
* If the field is set to `true`, requests to delete a channel also delete
* associated channel events.
* </pre>
*
* <code>bool force = 3;</code>
*
* @param value The force to set.
* @return This builder for chaining.
*/
public Builder setForce(boolean value) {
force_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* If the `force` field is set to the default value of `false`, you must
* delete all of a channel's events before you can delete the channel itself.
* If the field is set to `true`, requests to delete a channel also delete
* associated channel events.
* </pre>
*
* <code>bool force = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearForce() {
bitField0_ = (bitField0_ & ~0x00000004);
force_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.video.livestream.v1.DeleteChannelRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.video.livestream.v1.DeleteChannelRequest)
// NOTE(review): protoc-generated singleton/parser plumbing; kept byte-identical, comments only.
private static final com.google.cloud.video.livestream.v1.DeleteChannelRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.video.livestream.v1.DeleteChannelRequest();
}

// Shared immutable instance with all fields at their proto3 defaults.
public static com.google.cloud.video.livestream.v1.DeleteChannelRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to the builder's mergeFrom; any partially-read message is attached to the
// thrown InvalidProtocolBufferException via setUnfinishedMessage for diagnostics.
private static final com.google.protobuf.Parser<DeleteChannelRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteChannelRequest>() {
      @java.lang.Override
      public DeleteChannelRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // wrap plain I/O failures so callers only see protobuf exceptions
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<DeleteChannelRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<DeleteChannelRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.video.livestream.v1.DeleteChannelRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 35,541 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/virtual/phases/ea/EffectsClosure.java | /*
* Copyright (c) 2011, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.virtual.phases.ea;
import java.util.ArrayList;
import java.util.List;
import org.graalvm.collections.EconomicMap;
import org.graalvm.collections.EconomicSet;
import org.graalvm.collections.Equivalence;
import org.graalvm.word.LocationIdentity;
import jdk.graal.compiler.core.common.GraalOptions;
import jdk.graal.compiler.core.common.cfg.BlockMap;
import jdk.graal.compiler.core.common.cfg.CFGLoop;
import jdk.graal.compiler.core.common.type.Stamp;
import jdk.graal.compiler.core.common.util.CompilationAlarm;
import jdk.graal.compiler.debug.Assertions;
import jdk.graal.compiler.debug.DebugContext;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.debug.Indent;
import jdk.graal.compiler.graph.Node;
import jdk.graal.compiler.graph.NodeBitMap;
import jdk.graal.compiler.graph.NodeMap;
import jdk.graal.compiler.graph.iterators.NodeIterable;
import jdk.graal.compiler.nodes.AbstractMergeNode;
import jdk.graal.compiler.nodes.FixedNode;
import jdk.graal.compiler.nodes.FixedWithNextNode;
import jdk.graal.compiler.nodes.IfNode;
import jdk.graal.compiler.nodes.LogicConstantNode;
import jdk.graal.compiler.nodes.LogicNode;
import jdk.graal.compiler.nodes.LoopBeginNode;
import jdk.graal.compiler.nodes.LoopExitNode;
import jdk.graal.compiler.nodes.PhiNode;
import jdk.graal.compiler.nodes.ProxyNode;
import jdk.graal.compiler.nodes.StructuredGraph;
import jdk.graal.compiler.nodes.StructuredGraph.ScheduleResult;
import jdk.graal.compiler.nodes.ValueNode;
import jdk.graal.compiler.nodes.ValuePhiNode;
import jdk.graal.compiler.nodes.WithExceptionNode;
import jdk.graal.compiler.nodes.cfg.ControlFlowGraph;
import jdk.graal.compiler.nodes.cfg.HIRBlock;
import jdk.graal.compiler.nodes.extended.BoxNode;
import jdk.graal.compiler.nodes.util.GraphUtil;
import jdk.graal.compiler.nodes.virtual.AllocatedObjectNode;
import jdk.graal.compiler.nodes.virtual.CommitAllocationNode;
import jdk.graal.compiler.nodes.virtual.VirtualObjectNode;
import jdk.graal.compiler.options.OptionValues;
import jdk.graal.compiler.phases.graph.ReentrantBlockIterator;
import jdk.graal.compiler.phases.graph.ReentrantBlockIterator.BlockIteratorClosure;
import jdk.graal.compiler.phases.graph.ReentrantBlockIterator.LoopInfo;
/**
 * Base closure for effects-based graph analyses (e.g. partial escape analysis and the
 * read-eliminating phases built on it). It iterates over the control flow graph, collects
 * {@link GraphEffectList graph effects} per block, and applies them once the analysis reaches a
 * fixed point.
 */
public abstract class EffectsClosure<BlockT extends EffectsBlockState<BlockT>> extends EffectsPhase.Closure<BlockT> {

    /** The control flow graph being analyzed; also provides access to the graph itself. */
    protected final ControlFlowGraph cfg;
    /** Schedule used to enumerate the nodes of a block; may be null (then block node lists are used). */
    protected final ScheduleResult schedule;

    /**
     * If a node has an alias, this means that it was replaced with another node during analysis.
     * Nodes can be replaced by normal ("scalar") nodes, e.g., a LoadIndexedNode with a
     * ConstantNode, or by virtual nodes, e.g., a NewInstanceNode with a VirtualInstanceNode. A node
     * was replaced with a virtual value iff the alias is a subclass of VirtualObjectNode.
     *
     * This alias map exists only once and is not part of the block state, so that during iterative
     * loop processing the alias of a node may be changed to another value.
     */
    protected NodeMap<ValueNode> aliases;

    /**
     * This set allows for a quick check whether a node has inputs that were replaced with "scalar"
     * values.
     */
    private NodeBitMap hasScalarReplacedInputs;

    /*
     * TODO: if it was possible to introduce your own subclasses of Block and Loop, these maps would
     * not be necessary. We could merge the GraphEffectsList logic into them.
     */

    /**
     * The effects accumulated during analysis of nodes. They may be cleared and re-filled during
     * iterative loop processing.
     */
    protected BlockMap<GraphEffectList> blockEffects;

    /**
     * Effects that can only be applied after the effects from within the loop have been applied and
     * that must be applied before any effect from after the loop is applied. E.g., updating phis.
     */
    protected EconomicMap<CFGLoop<HIRBlock>, GraphEffectList> loopMergeEffects = EconomicMap.create(Equivalence.IDENTITY);

    /**
     * The entry state of loops is needed when loop proxies are processed.
     */
    private EconomicMap<LoopBeginNode, BlockT> loopEntryStates = EconomicMap.create(Equivalence.IDENTITY);

    // Intended to be used by read-eliminating phases based on the effects phase.
    protected EconomicMap<CFGLoop<HIRBlock>, LoopKillCache> loopLocationKillCache = EconomicMap.create(Equivalence.IDENTITY);

    /** True once any significant change was discovered (see {@code isSignificantNode}). */
    protected boolean changed;
    /** Debug context of the analyzed graph, used for logging/dumping. */
    protected final DebugContext debug;

    /**
     * The current execution mode: once we reach a maximum loop nest we stop further effects and
     * only perform the minimal necessary operations.
     */
    protected EffectsClosureMode currentMode;
public EffectsClosure(ScheduleResult schedule, ControlFlowGraph cfg) {
this.schedule = schedule;
this.cfg = cfg;
this.aliases = cfg.graph.createNodeMap();
this.hasScalarReplacedInputs = cfg.graph.createNodeBitMap();
this.blockEffects = new BlockMap<>(cfg);
this.debug = cfg.graph.getDebug();
for (HIRBlock block : cfg.getBlocks()) {
blockEffects.put(block, new GraphEffectList(debug));
}
this.currentMode = EffectsClosureMode.REGULAR_VIRTUALIZATION;
}
/**
 * @return {@code true} if the analysis discovered at least one significant change that should be
 *         applied to the graph
 */
@Override
public boolean hasChanged() {
    return this.changed;
}
/**
 * Effects closures always request that their collected effects be applied.
 */
@Override
public boolean needsApplyEffects() {
    return true;
}
/**
 * Applies all collected effects to the graph. Blocks are visited in a reentrant, ordered
 * iteration so that an effect adding a node runs before effects using it; CFG-modifying effects
 * run in a second pass because they change phi value indexes. Afterwards, obsolete nodes that
 * became unused are killed.
 */
@Override
public void applyEffects() {
    final StructuredGraph graph = cfg.graph;
    final ArrayList<Node> obsoleteNodes = new ArrayList<>(0);
    final ArrayList<GraphEffectList> effectList = new ArrayList<>();
    /*
     * Effects are applied during an ordered iteration over the blocks to apply them in the
     * correct order, e.g., apply the effect that adds a node to the graph before the node is
     * used.
     */
    BlockIteratorClosure<Void> closure = new BlockIteratorClosure<>() {

        @Override
        protected Void getInitialState() {
            return null;
        }

        private void apply(GraphEffectList effects) {
            if (effects != null && !effects.isEmpty()) {
                effectList.add(effects);
            }
        }

        @Override
        protected Void processBlock(HIRBlock block, Void currentState) {
            apply(blockEffects.get(block));
            return currentState;
        }

        @Override
        protected Void merge(HIRBlock merge, List<Void> states) {
            return null;
        }

        @Override
        protected Void cloneState(Void oldState) {
            return oldState;
        }

        @Override
        protected List<Void> processLoop(CFGLoop<HIRBlock> loop, Void initialState) {
            LoopInfo<Void> info = ReentrantBlockIterator.processLoop(this, loop, initialState);
            // loop merge effects must run after the loop body but before anything after the loop
            apply(loopMergeEffects.get(loop));
            return info.exitStates;
        }
    };
    ReentrantBlockIterator.apply(closure, cfg.getStartBlock());
    for (GraphEffectList effects : effectList) {
        effects.apply(graph, obsoleteNodes, false);
    }
    /*
     * Effects that modify the cfg (e.g., removing a branch for an if that got a constant
     * condition) need to be performed after all other effects, because they change phi value
     * indexes.
     */
    for (GraphEffectList effects : effectList) {
        effects.apply(graph, obsoleteNodes, true);
    }
    debug.dump(DebugContext.DETAILED_LEVEL, graph, "After applying effects");
    assert VirtualUtil.assertNonReachable(graph, obsoleteNodes) : Assertions.errorMessage("obsolete nodes should not be reachable: ", obsoleteNodes);
    // fixed typo: "unnusedNodes" -> "unusedNodes"
    final NodeBitMap unusedNodes = new NodeBitMap(graph);
    for (Node node : obsoleteNodes) {
        if (node.isAlive() && node.hasNoUsages()) {
            if (node instanceof FixedWithNextNode) {
                assert ((FixedWithNextNode) node).next() == null : node;
            }
            node.replaceAtUsages(null);
            if (GraphUtil.shouldKillUnused(node)) {
                unusedNodes.mark(node);
            }
        }
    }
    if (unusedNodes.isNotEmpty()) {
        GraphUtil.killAllWithUnusedFloatingInputs(unusedNodes, false);
    }
}
/**
 * Analyzes one basic block: every scheduled node is handed to {@link #processNode}, loop-exit
 * proxies are processed, and branches proven unreachable (an {@code if} whose condition aliases
 * a {@link LogicConstantNode}) are marked dead so merges can ignore them.
 *
 * @param block the block to process
 * @param state the incoming block state; may be marked dead during processing
 * @return the (possibly mutated) state after the block
 */
@Override
protected BlockT processBlock(HIRBlock block, BlockT state) {
    if (!state.isDead()) {
        GraphEffectList effects = blockEffects.get(block);

        /*
         * If we enter an if branch that is known to be unreachable, we mark it as dead and
         * cease to do any more analysis on it. At merges, these dead branches will be ignored.
         */
        if (block.getBeginNode().predecessor() instanceof IfNode) {
            IfNode ifNode = (IfNode) block.getBeginNode().predecessor();
            LogicNode condition = ifNode.condition();
            Node alias = getScalarAlias(condition);
            if (alias instanceof LogicConstantNode) {
                LogicConstantNode constant = (LogicConstantNode) alias;
                boolean isTrueSuccessor = block.getBeginNode() == ifNode.trueSuccessor();

                if (constant.getValue() != isTrueSuccessor) {
                    state.markAsDead();
                    effects.killIfBranch(ifNode, constant.getValue());
                    return state;
                }
            }
        }

        OptionValues options = block.getBeginNode().getOptions();
        // reuse the options looked up above instead of re-fetching them from the node
        if (GraalOptions.TraceEscapeAnalysis.getValue(options)) {
            int predCount = block.getPredecessorCount();
            HIRBlock[] pred = new HIRBlock[predCount];
            for (int i = 0; i < predCount; i++) {
                pred[i] = block.getPredecessorAt(i);
            }
            int succCount = block.getSuccessorCount();
            HIRBlock[] succ = new HIRBlock[succCount];
            for (int i = 0; i < succCount; i++) {
                succ[i] = block.getSuccessorAt(i);
            }
            VirtualUtil.trace(options, debug, "\nBlock: %s, preds: %s, succ: %s (", block, pred, succ);
        }

        // a lastFixedNode is needed in case we want to insert fixed nodes
        FixedWithNextNode lastFixedNode = null;
        Iterable<? extends Node> nodes = schedule != null ? schedule.getBlockToNodesMap().get(block) : block.getNodes();
        for (Node node : nodes) {
            // reset the aliases (may be non-null due to iterative loop processing)
            aliases.set(node, null);
            if (node instanceof LoopExitNode) {
                LoopExitNode loopExit = (LoopExitNode) node;
                for (ProxyNode proxy : loopExit.proxies()) {
                    aliases.set(proxy, null);
                    changed |= processNode(proxy, state, effects, lastFixedNode) && isSignificantNode(node);
                }
                // `effects` is this block's effect list (same object as blockEffects.get(block))
                processLoopExit(loopExit, loopEntryStates.get(loopExit.loopBegin()), state, effects);
            }
            HIRBlock exceptionEdgeToKill = node instanceof WithExceptionNode ? cfg.blockFor(((WithExceptionNode) node).exceptionEdge()) : null;
            boolean lastNodeChanged = processNode(node, state, effects, lastFixedNode) && isSignificantNode(node);
            changed |= lastNodeChanged;
            if (lastNodeChanged && exceptionEdgeToKill != null) {
                /*
                 * We deleted a exception node, per definition the exception edge died in that
                 * process, no need to process the exception edge
                 */
                if (state.exceptionEdgesToKill == null) {
                    state.exceptionEdgesToKill = EconomicSet.create();
                }
                state.exceptionEdgesToKill.add(exceptionEdgeToKill);
            }
            if (node instanceof FixedWithNextNode) {
                lastFixedNode = (FixedWithNextNode) node;
            }
            if (state.isDead()) {
                break;
            }
        }
        VirtualUtil.trace(options, debug, ")\n    end state: %s\n", state);
    }
    return state;
}
/**
 * After a control-flow split, marks the successor's state dead if a previously processed
 * {@link WithExceptionNode} in this state scheduled that exception-edge block for removal.
 */
@Override
protected BlockT afterSplit(HIRBlock successor, BlockT oldState) {
    if (oldState.exceptionEdgesToKill != null && oldState.exceptionEdgesToKill.contains(successor)) {
        oldState.markAsDead();
    }
    return oldState;
}
/**
 * Changes to {@link CommitAllocationNode}s, {@link AllocatedObjectNode}s and {@link BoxNode}s
 * are not considered to be "important". If only changes to those nodes are discovered during
 * analysis, the effects need not be applied.
 */
private static boolean isSignificantNode(Node node) {
    if (node instanceof CommitAllocationNode) {
        return false;
    }
    if (node instanceof AllocatedObjectNode) {
        return false;
    }
    return !(node instanceof BoxNode);
}
/**
 * Execution modes of the closure; see {@code currentMode}. The mode only ever degrades during
 * loop processing and is reset to {@link #REGULAR_VIRTUALIZATION} once the outermost loop
 * finishes.
 */
enum EffectsClosureMode {
    /**
     * Regular processing of virtualizable nodes, i.e., try to virtualize them if possible.
     */
    REGULAR_VIRTUALIZATION,
    /**
     * Stop trying to virtualize allocations since the maximum loop nesting level is reached.
     */
    STOP_NEW_VIRTUALIZATIONS_LOOP_NEST,
    /**
     * Immediately materialize all virtual allocations after virtualization to avoid
     * re-iterating loops during PEA.
     */
    MATERIALIZE_ALL
}
/**
 * Collects the effects of virtualizing the given node.
 *
 * @param node the node to analyze
 * @param state the current block state (may be mutated)
 * @param effects the effect list of the current block to append to
 * @param lastFixedNode the most recent fixed node seen in this block, used as an insertion
 *            point when fixed nodes must be added; may be null at the start of a block
 * @return {@code true} if the effects include removing the node, {@code false} otherwise.
 */
protected abstract boolean processNode(Node node, BlockT state, GraphEffectList effects, FixedWithNextNode lastFixedNode);
/**
 * Merges the predecessor states at a merge block by delegating to a {@link MergeProcessor}
 * (dead predecessor states are filtered out first) and records the merge effects on the merge
 * block's effect list.
 */
@Override
protected BlockT merge(HIRBlock merge, List<BlockT> states) {
    GraphEffectList effectsAtMerge = blockEffects.get(merge);
    assert effectsAtMerge.isEmpty();
    MergeProcessor processor = createMergeProcessor(merge);
    doMergeWithoutDead(processor, states);
    effectsAtMerge.addAll(processor.mergeEffects);
    effectsAtMerge.addAll(processor.afterMergeEffects);
    return processor.newState;
}
/**
 * Iteratively processes a loop until the merged state at the loop header reaches a fixed
 * point. For outermost loops that contain nests deeper than the escape-analysis cutoff, all
 * analysis state is backed up first so that on overflow the loop nest can be re-processed in
 * {@link EffectsClosureMode#MATERIALIZE_ALL} mode.
 *
 * @param loop the loop to process
 * @param initialState the state at the loop entry
 * @return one exit state per loop exit
 */
@Override
@SuppressWarnings("try")
protected final List<BlockT> processLoop(CFGLoop<HIRBlock> loop, BlockT initialState) {
    final StructuredGraph graph = loop.getHeader().getBeginNode().graph();
    if (initialState.isDead()) {
        ArrayList<BlockT> states = new ArrayList<>();
        for (int i = 0; i < loop.getLoopExits().size(); i++) {
            states.add(initialState);
        }
        return states;
    }
    /*
     * Special case nested loops: To avoid an exponential runtime for nested loops we try to
     * only process them as little times as possible.
     *
     * In the first iteration of an outer most loop we go into the inner most loop(s). We run
     * the first iteration of the inner most loop and then, if necessary, a second iteration.
     *
     * We return from the recursion and finish the first iteration of the outermost loop. If we
     * have to do a second iteration in the outer most loop we go again into the inner most
     * loop(s) but this time we already know all states that are killed by the loop so inside
     * the loop we will only have those changes that propagate from the first iteration of the
     * outer most loop into the current loop. We strip the initial loop state for the inner most
     * loops and do the first iteration with the (possible) changes from outer loops. If there
     * are no changes we only have to do 1 iteration and are done.
     *
     * However, the stripping in the innermost loop(s) is only done for new allocations, i.e.
     * every allocation reached after the loop depth filter is automatically materialized. If we
     * reach an outer loop's allocation that is still virtual in an inner loop with depth >
     * cutOff, and this virtualized allocation is materialized in the inner loop we throw an
     * exception and re-do the entire loop nest and materialize everything
     */
    BlockT initialStateRemovedKilledLocations = stripKilledLoopLocations(loop, cloneState(initialState));

    NodeMap<ValueNode> aliasesCopy = null;
    NodeBitMap hasScalarReplacedInputsCopy = null;
    BlockMap<GraphEffectList> blockEffectsCopy = null;
    EconomicMap<CFGLoop<HIRBlock>, GraphEffectList> loopMergeEffectsCopy = null;
    EconomicMap<LoopBeginNode, BlockT> loopEntryStatesCopy = null;
    EconomicMap<CFGLoop<HIRBlock>, LoopKillCache> loopLocationKillCacheCopy = null;
    BlockT initialStateRemovedKilledLocationsBackup = null;

    if (loop.getDepth() == 1) {
        /*
         * Find out if we will need the copy versions
         */
        boolean initBackUp = false;
        for (CFGLoop<HIRBlock> l : cfg.getLoops()) {
            if (l.getDepth() > GraalOptions.EscapeAnalysisLoopCutoff.getValue(cfg.graph.getOptions())) {
                initBackUp = true;
                break;
            }
        }
        if (initBackUp) {
            initialStateRemovedKilledLocationsBackup = cloneState(initialStateRemovedKilledLocations);
            aliasesCopy = new NodeMap<>(aliases);
            hasScalarReplacedInputsCopy = hasScalarReplacedInputs.copy();
            blockEffectsCopy = new BlockMap<>(cfg);
            for (HIRBlock block : cfg.getBlocks()) {
                GraphEffectList copy = new GraphEffectList(debug);
                copy.addAll(blockEffects.get(block));
                blockEffectsCopy.put(block, copy);
            }
            loopMergeEffectsCopy = EconomicMap.create(Equivalence.IDENTITY);
            loopMergeEffectsCopy.putAll(loopMergeEffects);
            loopEntryStatesCopy = EconomicMap.create(Equivalence.IDENTITY);
            loopEntryStatesCopy.putAll(loopEntryStates);
            loopLocationKillCacheCopy = EconomicMap.create(Equivalence.IDENTITY);
            loopLocationKillCacheCopy.putAll(loopLocationKillCache);
        }
    }
    boolean tooManyIterationsSeen = false;
    // TERMINATION ARGUMENT: bound by number of basic blocks and iterative loop traversal
    while (true) {
        CompilationAlarm.checkProgress(cfg.graph);
        try {
            BlockT loopEntryState = initialStateRemovedKilledLocations;
            BlockT lastMergedState = cloneState(initialStateRemovedKilledLocations);
            processInitialLoopState(loop, lastMergedState);
            MergeProcessor mergeProcessor = createMergeProcessor(loop.getHeader());
            /*
             * Iterative loop processing: we take the predecessor state as the loop's starting
             * state, processing the loop contents, merge the states of all loop ends, and check
             * whether the resulting state is equal to the starting state. If it is, the loop
             * processing has finished, if not, another iteration is needed.
             *
             * This processing converges because the merge processing always makes the starting
             * state more generic, e.g., adding phis instead of non-phi values.
             */
            boolean[] knownAliveLoopEnds = new boolean[loop.numBackedges()];
            for (int iteration = 0; iteration < 10; iteration++) {
                try (Indent i = debug.logAndIndent("================== Process Loop Effects Closure: block:%s begin node:%s iteration:%s", loop.getHeader(), loop.getHeader().getBeginNode(),
                                iteration)) {
                    LoopInfo<BlockT> info = ReentrantBlockIterator.processLoop(this, loop, cloneState(lastMergedState));

                    List<BlockT> states = new ArrayList<>();
                    states.add(initialStateRemovedKilledLocations);
                    states.addAll(info.endStates);
                    doMergeWithoutDead(mergeProcessor, states);

                    debug.log("MergeProcessor New State: %s", mergeProcessor.newState);
                    debug.log("===== vs.");
                    debug.log("Last Merged State: %s", lastMergedState);

                    if (mergeProcessor.newState.equivalentTo(lastMergedState)) {
                        blockEffects.get(loop.getHeader()).insertAll(mergeProcessor.mergeEffects, 0);
                        loopMergeEffects.put(loop, mergeProcessor.afterMergeEffects);

                        assert info.exitStates.size() == loop.getLoopExits().size() : Assertions.errorMessage(info, info.exitStates, loop, loop.getLoopExits());
                        loopEntryStates.put((LoopBeginNode) loop.getHeader().getBeginNode(), loopEntryState);
                        assert assertExitStatesNonEmpty(loop, info);

                        processKilledLoopLocations(loop, initialStateRemovedKilledLocations, mergeProcessor.newState);

                        if (currentMode != EffectsClosureMode.REGULAR_VIRTUALIZATION && loop.getDepth() == 1) {
                            /*
                             * We are done processing the loop nest with limited EA for nested
                             * objects deeper > level, switch back to normal mode.
                             */
                            currentMode = EffectsClosureMode.REGULAR_VIRTUALIZATION;
                        }
                        return info.exitStates;
                    } else {
                        /*
                         * Check monotonicity: Once an iteration over the loop has determined
                         * that a certain loop end is reachable (the state at that end is
                         * alive), a later iteration must not conclude that that loop end is
                         * unreachable (the state is dead). This would mean that analysis
                         * information became more precise. But it can only become less precise
                         * as we try to converge towards a fixed point.
                         */
                        GraalError.guarantee(info.endStates.size() == knownAliveLoopEnds.length,
                                        "should have the same number of end states as loop ends: %s / %s",
                                        info.endStates.size(), knownAliveLoopEnds.length);
                        int endIndex = 0;
                        for (BlockT endState : info.endStates) {
                            GraalError.guarantee(!(knownAliveLoopEnds[endIndex] && endState.isDead()),
                                            "%s: monotonicity violated, state at loop end %s should remain alive but is dead: %s",
                                            loop, endIndex, endState);
                            knownAliveLoopEnds[endIndex] |= !endState.isDead();
                            endIndex++;
                        }
                        lastMergedState = mergeProcessor.newState;
                        for (HIRBlock block : loop.getBlocks()) {
                            blockEffects.get(block).clear();
                            if (block.isLoopHeader()) {
                                final GraphEffectList loopEffects = loopMergeEffects.get(block.getLoop());
                                if (loopEffects != null) {
                                    loopEffects.clear();
                                }
                            }
                        }
                    }
                }
            }
        } catch (EffecsClosureOverflowException e) {
            if (loop.getDepth() != 1) {
                /*
                 * We are not yet at the outermost loop, we rethrow the error to actually exit
                 * ALL cases
                 */
                throw e;
            }
            /*
             * We reached the outermost loop after having seen a loop nest operation that would
             * cause exponential processing. Thus, we reset everything to before the loop and
             * process the loop in a mode where we immediately materialize every virtualizable
             * node in order to avoid any repetitive loop processing.
             */
            // fixed truncated/uninformative assertion messages below
            assert aliases != aliasesCopy : "alias map must have been backed up before processing the outermost loop";
            aliases = aliasesCopy;
            hasScalarReplacedInputs = hasScalarReplacedInputsCopy;
            assert blockEffects != blockEffectsCopy : "block effects must have been backed up before processing the outermost loop";
            blockEffects = blockEffectsCopy;
            loopMergeEffects = loopMergeEffectsCopy;
            loopEntryStates = loopEntryStatesCopy;
            loopLocationKillCache = loopLocationKillCacheCopy;
            initialStateRemovedKilledLocations = initialStateRemovedKilledLocationsBackup;
            processStateBeforeLoopOnOverflow(initialStateRemovedKilledLocations, ((LoopBeginNode) loop.getHeader().getBeginNode()).forwardEnd(),
                            blockEffects.get(loop.getHeader().getPredecessorAt(0)));
            currentMode = EffectsClosureMode.MATERIALIZE_ALL;
            continue;
        }
        if (!tooManyIterationsSeen) {
            tooManyIterationsSeen = true;
            /*
             * The first time we see that we did too many iterations we materialize everything
             * before the loop and see if that fixes our problems.
             */
            graph.getDebug().dump(DebugContext.VERY_DETAILED_LEVEL, graph, "Too many loop iterations for %s trying to materialize everything before loop and redo loop nest", loop);
            currentMode = EffectsClosureMode.MATERIALIZE_ALL;
            continue;
        } else {
            throw new GraalError("too many iterations at %s", loop);
        }
    }
}
/**
 * Thrown during loop-nest processing to abort and restart analysis of the outermost loop in
 * {@link EffectsClosureMode#MATERIALIZE_ALL} mode (see {@code processLoop}).
 *
 * NOTE(review): the class name keeps the historical misspelling ("Effecs"); renaming it would
 * break subclasses/callers outside this file that reference it.
 */
static class EffecsClosureOverflowException extends RuntimeException {
    private static final long serialVersionUID = 1;
}
@SuppressWarnings("unused")
protected void processStateBeforeLoopOnOverflow(BlockT initialState, FixedNode materializeBefore, GraphEffectList effects) {
}
@SuppressWarnings("unused")
protected BlockT stripKilledLoopLocations(CFGLoop<HIRBlock> loop, BlockT initialState) {
return initialState;
}
@SuppressWarnings("unused")
protected void processKilledLoopLocations(CFGLoop<HIRBlock> loop, BlockT initialState, BlockT mergedStates) {
// nothing to do
}
@SuppressWarnings("unused")
protected void processInitialLoopState(CFGLoop<HIRBlock> loop, BlockT initialState) {
// nothing to do
}
/**
 * Runs the merge processor on the subset of states that are still alive. If every state is
 * dead, the first (dead) state is simply propagated; otherwise the processor is given the
 * alive states together with the mapping from alive-state index to predecessor index.
 */
private void doMergeWithoutDead(MergeProcessor mergeProcessor, List<BlockT> states) {
    int aliveCount = 0;
    for (BlockT s : states) {
        if (!s.isDead()) {
            aliveCount++;
        }
    }
    if (aliveCount == 0) {
        // all predecessors are dead: propagate any (dead) state
        mergeProcessor.setNewState(states.get(0));
        return;
    }
    int[] indexes = new int[aliveCount];
    List<BlockT> aliveStates;
    if (aliveCount == states.size()) {
        // nothing to filter: identity index mapping
        aliveStates = states;
        for (int i = 0; i < indexes.length; i++) {
            indexes[i] = i;
        }
    } else {
        ArrayList<BlockT> filtered = new ArrayList<>(aliveCount);
        for (int i = 0; i < states.size(); i++) {
            BlockT s = states.get(i);
            if (!s.isDead()) {
                indexes[filtered.size()] = i;
                filtered.add(s);
            }
        }
        aliveStates = filtered;
    }
    mergeProcessor.setStateIndexes(indexes);
    mergeProcessor.setNewState(getInitialState());
    mergeProcessor.merge(aliveStates);
}
/**
 * Assertion helper: verifies that every loop exit received a non-null state. Always returns
 * {@code true} so it can be used inside an {@code assert} statement.
 */
private boolean assertExitStatesNonEmpty(CFGLoop<HIRBlock> loop, LoopInfo<BlockT> info) {
    int exitCount = loop.getLoopExits().size();
    for (int i = 0; i < exitCount; i++) {
        assert info.exitStates.get(i) != null : "no loop exit state at " + loop.getLoopExits().get(i) + " / " + loop.getHeader();
    }
    return true;
}
/**
 * Processes a loop exit: invoked from {@code processBlock} with the state stored at the loop's
 * entry ({@code loopEntryStates}) and the state at the exit, so subclasses can handle loop
 * proxies. Effects are appended to the exiting block's effect list.
 */
protected abstract void processLoopExit(LoopExitNode exitNode, BlockT initialState, BlockT exitState, GraphEffectList effects);

/**
 * Creates the {@link MergeProcessor} used for the given merge block (also used for loop
 * headers during iterative loop processing).
 */
protected abstract MergeProcessor createMergeProcessor(HIRBlock merge);
/**
 * The main workhorse for merging states, both for loops and for normal merges.
 */
protected abstract class MergeProcessor {

    /** The merge block whose predecessor states are combined. */
    protected final HIRBlock mergeBlock;
    /** The merge node at the beginning of {@link #mergeBlock}. */
    protected final AbstractMergeNode merge;

    /** Effects to apply at the merge itself (prepended to the merge block's effects). */
    protected final GraphEffectList mergeEffects;
    /** Effects to apply after the merge, e.g. initializing phi inputs. */
    protected final GraphEffectList afterMergeEffects;

    /**
     * The indexes are used to map from an index in the list of active (non-dead) predecessors
     * to an index in the list of all predecessors (the latter may be larger).
     */
    private int[] stateIndexes;
    /** The state resulting from the merge; set up via {@link #setNewState}. */
    protected BlockT newState;

    public MergeProcessor(HIRBlock mergeBlock) {
        this.mergeBlock = mergeBlock;
        this.merge = (AbstractMergeNode) mergeBlock.getBeginNode();
        this.mergeEffects = new GraphEffectList(debug);
        this.afterMergeEffects = new GraphEffectList(debug);
    }

    /**
     * @param states the states that should be merged.
     */
    protected abstract void merge(List<BlockT> states);

    // Installs the target state and resets any effects from a previous merge attempt
    // (important during iterative loop processing).
    private void setNewState(BlockT state) {
        newState = state;
        mergeEffects.clear();
        afterMergeEffects.clear();
    }

    // Records the alive-index -> predecessor-index mapping for this merge.
    private void setStateIndexes(int[] stateIndexes) {
        this.stateIndexes = stateIndexes;
    }

    /** Returns the predecessor block corresponding to the i-th alive state. */
    protected final HIRBlock getPredecessor(int index) {
        return mergeBlock.getPredecessorAt(stateIndexes[index]);
    }

    /** Returns the phis of the merge node. */
    protected final NodeIterable<PhiNode> getPhis() {
        return merge.phis();
    }

    /** Returns the phi input corresponding to the i-th alive state. */
    protected final ValueNode getPhiValueAt(PhiNode phi, int index) {
        return phi.valueAt(stateIndexes[index]);
    }

    /** Creates a new value phi at the merge with one input slot per predecessor. */
    protected final ValuePhiNode createValuePhi(Stamp stamp) {
        ValuePhiNode valuePhi = new ValuePhiNode(stamp, merge, new ValueNode[mergeBlock.getPredecessorCount()]);
        valuePhi.setNodeSourcePosition(merge.getNodeSourcePosition());
        return valuePhi;
    }

    /** Schedules initialization of the phi input for the i-th alive state. */
    protected final void setPhiInput(PhiNode phi, int index, ValueNode value) {
        afterMergeEffects.initializePhiInput(phi, stateIndexes[index], value);
    }

    /** Returns the graph containing the merge node. */
    protected final StructuredGraph graph() {
        return merge.graph();
    }

    @Override
    public String toString() {
        return "MergeProcessor@" + merge;
    }
}
/**
 * Records that {@code node} was replaced by the scalar (non-virtual) value {@code alias} and
 * marks all usages of {@code node} as having scalar-replaced inputs.
 */
public void addScalarAlias(ValueNode node, ValueNode alias) {
    assert !(alias instanceof VirtualObjectNode) : "Must not be a virtual object node " + alias;
    aliases.set(node, alias);
    node.usages().forEach(usage -> {
        if (!hasScalarReplacedInputs.isNew(usage)) {
            hasScalarReplacedInputs.mark(usage);
        }
    });
}
/**
 * @return {@code true} if any input of {@code node} was replaced with a scalar value via
 *         {@link #addScalarAlias}
 */
protected final boolean hasScalarReplacedInputs(Node node) {
    return this.hasScalarReplacedInputs.isMarked(node);
}
/**
 * Returns the scalar replacement recorded for {@code node}, or {@code node} itself when there
 * is none (including when the node is null, dead, or its alias is a virtual object).
 */
public ValueNode getScalarAlias(ValueNode node) {
    assert !(node instanceof VirtualObjectNode) : node;
    if (node == null || !node.isAlive() || aliases.isNew(node)) {
        return node;
    }
    ValueNode replacement = aliases.get(node);
    if (replacement == null || replacement instanceof VirtualObjectNode) {
        return node;
    }
    return replacement;
}
/**
 * Per-loop cache of the memory locations a loop kills, used by read-eliminating subclasses via
 * {@code loopLocationKillCache}. The first killed location is kept inline; further locations
 * spill into an identity set; {@code killsAll} short-circuits everything.
 */
protected static final class LoopKillCache {
    // number of times this loop has been visited by the analysis
    private int visits;
    // inline fast path for the single-location case
    private LocationIdentity firstLocation;
    // lazily allocated overflow set for additional killed locations
    private EconomicSet<LocationIdentity> killedLocations;
    // when set, the loop is treated as killing every location
    private boolean killsAll;

    protected LoopKillCache(int visits) {
        this.visits = visits;
    }

    protected void visited() {
        visits++;
    }

    protected int visits() {
        return visits;
    }

    protected void setKillsAll() {
        killsAll = true;
        // individual locations are irrelevant once everything is killed
        firstLocation = null;
        killedLocations = null;
    }

    protected boolean containsLocation(LocationIdentity locationIdentity) {
        if (killsAll) {
            return true;
        }
        if (firstLocation == null) {
            return false;
        }
        if (firstLocation.equals(locationIdentity)) {
            return true;
        }
        return killedLocations != null && killedLocations.contains(locationIdentity);
    }

    protected void rememberLoopKilledLocation(LocationIdentity locationIdentity) {
        if (killsAll) {
            return;
        }
        if (firstLocation == null || firstLocation.equals(locationIdentity)) {
            firstLocation = locationIdentity;
            return;
        }
        if (killedLocations == null) {
            killedLocations = EconomicSet.create(Equivalence.IDENTITY);
        }
        killedLocations.add(locationIdentity);
    }

    protected boolean loopKillsLocations() {
        return killsAll || firstLocation != null;
    }
}
}
|
googleapis/google-cloud-java | 35,154 | java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/CreateMeshRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/mesh.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used by the CreateMesh method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.CreateMeshRequest}
*/
// NOTE(review): protoc-generated file ("DO NOT EDIT!"). Code below is kept byte-identical;
// only review comments were added — regenerating from the .proto will drop them.
public final class CreateMeshRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.CreateMeshRequest)
    CreateMeshRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateMeshRequest.newBuilder() to construct.
  private CreateMeshRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor: initializes string fields to their proto3 default (empty string).
  private CreateMeshRequest() {
    parent_ = "";
    meshId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateMeshRequest();
  }
  // Descriptor/reflection plumbing generated from google/cloud/networkservices/v1/mesh.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.MeshProto
        .internal_static_google_cloud_networkservices_v1_CreateMeshRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.MeshProto
        .internal_static_google_cloud_networkservices_v1_CreateMeshRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.CreateMeshRequest.class,
            com.google.cloud.networkservices.v1.CreateMeshRequest.Builder.class);
  }
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either the lazily-decoded String or the original ByteString; getParent() caches the
  // decoded String back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent resource of the Mesh. Must be in the
   * format `projects/&#42;/locations/global`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The parent resource of the Mesh. Must be in the
   * format `projects/&#42;/locations/global`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int MESH_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object meshId_ = "";
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The meshId.
*/
@java.lang.Override
public java.lang.String getMeshId() {
java.lang.Object ref = meshId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
meshId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for meshId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMeshIdBytes() {
java.lang.Object ref = meshId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
meshId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MESH_FIELD_NUMBER = 3;
private com.google.cloud.networkservices.v1.Mesh mesh_;
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>.google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the mesh field is set.
*/
@java.lang.Override
public boolean hasMesh() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>.google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The mesh.
*/
@java.lang.Override
public com.google.cloud.networkservices.v1.Mesh getMesh() {
return mesh_ == null ? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance() : mesh_;
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>.google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.networkservices.v1.MeshOrBuilder getMeshOrBuilder() {
return mesh_ == null ? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance() : mesh_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(meshId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, meshId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getMesh());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(meshId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, meshId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getMesh());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkservices.v1.CreateMeshRequest)) {
return super.equals(obj);
}
com.google.cloud.networkservices.v1.CreateMeshRequest other =
(com.google.cloud.networkservices.v1.CreateMeshRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getMeshId().equals(other.getMeshId())) return false;
if (hasMesh() != other.hasMesh()) return false;
if (hasMesh()) {
if (!getMesh().equals(other.getMesh())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + MESH_ID_FIELD_NUMBER;
hash = (53 * hash) + getMeshId().hashCode();
if (hasMesh()) {
hash = (37 * hash) + MESH_FIELD_NUMBER;
hash = (53 * hash) + getMesh().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.networkservices.v1.CreateMeshRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request used by the CreateMesh method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.CreateMeshRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.CreateMeshRequest)
com.google.cloud.networkservices.v1.CreateMeshRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkservices.v1.MeshProto
.internal_static_google_cloud_networkservices_v1_CreateMeshRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkservices.v1.MeshProto
.internal_static_google_cloud_networkservices_v1_CreateMeshRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkservices.v1.CreateMeshRequest.class,
com.google.cloud.networkservices.v1.CreateMeshRequest.Builder.class);
}
// Construct using com.google.cloud.networkservices.v1.CreateMeshRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getMeshFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
meshId_ = "";
mesh_ = null;
if (meshBuilder_ != null) {
meshBuilder_.dispose();
meshBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.networkservices.v1.MeshProto
.internal_static_google_cloud_networkservices_v1_CreateMeshRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.CreateMeshRequest getDefaultInstanceForType() {
return com.google.cloud.networkservices.v1.CreateMeshRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.networkservices.v1.CreateMeshRequest build() {
com.google.cloud.networkservices.v1.CreateMeshRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.CreateMeshRequest buildPartial() {
com.google.cloud.networkservices.v1.CreateMeshRequest result =
new com.google.cloud.networkservices.v1.CreateMeshRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.networkservices.v1.CreateMeshRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.meshId_ = meshId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.mesh_ = meshBuilder_ == null ? mesh_ : meshBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.networkservices.v1.CreateMeshRequest) {
return mergeFrom((com.google.cloud.networkservices.v1.CreateMeshRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.networkservices.v1.CreateMeshRequest other) {
if (other == com.google.cloud.networkservices.v1.CreateMeshRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getMeshId().isEmpty()) {
meshId_ = other.meshId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasMesh()) {
mergeMesh(other.getMesh());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
meshId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getMeshFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource of the Mesh. Must be in the
* format `projects/*/locations/global`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource of the Mesh. Must be in the
* format `projects/*/locations/global`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource of the Mesh. Must be in the
* format `projects/*/locations/global`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource of the Mesh. Must be in the
* format `projects/*/locations/global`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource of the Mesh. Must be in the
* format `projects/*/locations/global`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object meshId_ = "";
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The meshId.
*/
public java.lang.String getMeshId() {
java.lang.Object ref = meshId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
meshId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for meshId.
*/
public com.google.protobuf.ByteString getMeshIdBytes() {
java.lang.Object ref = meshId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
meshId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The meshId to set.
* @return This builder for chaining.
*/
public Builder setMeshId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
meshId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearMeshId() {
meshId_ = getDefaultInstance().getMeshId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Short name of the Mesh resource to be created.
* </pre>
*
* <code>string mesh_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for meshId to set.
* @return This builder for chaining.
*/
public Builder setMeshIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
meshId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.networkservices.v1.Mesh mesh_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.networkservices.v1.Mesh,
com.google.cloud.networkservices.v1.Mesh.Builder,
com.google.cloud.networkservices.v1.MeshOrBuilder>
meshBuilder_;
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the mesh field is set.
*/
public boolean hasMesh() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The mesh.
*/
public com.google.cloud.networkservices.v1.Mesh getMesh() {
if (meshBuilder_ == null) {
return mesh_ == null
? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance()
: mesh_;
} else {
return meshBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setMesh(com.google.cloud.networkservices.v1.Mesh value) {
if (meshBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
mesh_ = value;
} else {
meshBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setMesh(com.google.cloud.networkservices.v1.Mesh.Builder builderForValue) {
if (meshBuilder_ == null) {
mesh_ = builderForValue.build();
} else {
meshBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeMesh(com.google.cloud.networkservices.v1.Mesh value) {
if (meshBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& mesh_ != null
&& mesh_ != com.google.cloud.networkservices.v1.Mesh.getDefaultInstance()) {
getMeshBuilder().mergeFrom(value);
} else {
mesh_ = value;
}
} else {
meshBuilder_.mergeFrom(value);
}
if (mesh_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearMesh() {
bitField0_ = (bitField0_ & ~0x00000004);
mesh_ = null;
if (meshBuilder_ != null) {
meshBuilder_.dispose();
meshBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.networkservices.v1.Mesh.Builder getMeshBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getMeshFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.networkservices.v1.MeshOrBuilder getMeshOrBuilder() {
if (meshBuilder_ != null) {
return meshBuilder_.getMessageOrBuilder();
} else {
return mesh_ == null
? com.google.cloud.networkservices.v1.Mesh.getDefaultInstance()
: mesh_;
}
}
/**
*
*
* <pre>
* Required. Mesh resource to be created.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.Mesh mesh = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.networkservices.v1.Mesh,
com.google.cloud.networkservices.v1.Mesh.Builder,
com.google.cloud.networkservices.v1.MeshOrBuilder>
getMeshFieldBuilder() {
if (meshBuilder_ == null) {
meshBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.networkservices.v1.Mesh,
com.google.cloud.networkservices.v1.Mesh.Builder,
com.google.cloud.networkservices.v1.MeshOrBuilder>(
getMesh(), getParentForChildren(), isClean());
mesh_ = null;
}
return meshBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.CreateMeshRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.CreateMeshRequest)
private static final com.google.cloud.networkservices.v1.CreateMeshRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.CreateMeshRequest();
}
public static com.google.cloud.networkservices.v1.CreateMeshRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateMeshRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateMeshRequest>() {
@java.lang.Override
public CreateMeshRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateMeshRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateMeshRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.CreateMeshRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/session.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* Represents the information of a query if handled by generative agent
* resources.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.GenerativeInfo}
*/
public final class GenerativeInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.GenerativeInfo)
GenerativeInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use GenerativeInfo.newBuilder() to construct.
private GenerativeInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GenerativeInfo() {
currentPlaybooks_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GenerativeInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_GenerativeInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_GenerativeInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.class,
com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.Builder.class);
}
private int bitField0_;
public static final int CURRENT_PLAYBOOKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList currentPlaybooks_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
* the conversation has currently entered, with the most recent one on the
* top.
* </pre>
*
* <code>repeated string current_playbooks = 1;</code>
*
* @return A list containing the currentPlaybooks.
*/
public com.google.protobuf.ProtocolStringList getCurrentPlaybooksList() {
return currentPlaybooks_;
}
/**
*
*
* <pre>
* The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
* the conversation has currently entered, with the most recent one on the
* top.
* </pre>
*
* <code>repeated string current_playbooks = 1;</code>
*
* @return The count of currentPlaybooks.
*/
public int getCurrentPlaybooksCount() {
return currentPlaybooks_.size();
}
/**
*
*
* <pre>
* The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
* the conversation has currently entered, with the most recent one on the
* top.
* </pre>
*
* <code>repeated string current_playbooks = 1;</code>
*
* @param index The index of the element to return.
* @return The currentPlaybooks at the given index.
*/
public java.lang.String getCurrentPlaybooks(int index) {
return currentPlaybooks_.get(index);
}
/**
*
*
* <pre>
* The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
* the conversation has currently entered, with the most recent one on the
* top.
* </pre>
*
* <code>repeated string current_playbooks = 1;</code>
*
* @param index The index of the value to return.
* @return The bytes of the currentPlaybooks at the given index.
*/
public com.google.protobuf.ByteString getCurrentPlaybooksBytes(int index) {
return currentPlaybooks_.getByteString(index);
}
public static final int ACTION_TRACING_INFO_FIELD_NUMBER = 2;
// Presence of this singular message field is tracked by bit 0 of bitField0_.
private com.google.cloud.dialogflow.cx.v3beta1.Example actionTracingInfo_;
/**
 *
 *
 * <pre>
 * The actions performed by the generative playbook for the current agent
 * response.
 * </pre>
 *
 * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
 *
 * @return Whether the actionTracingInfo field is set.
 */
@java.lang.Override
public boolean hasActionTracingInfo() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * The actions performed by the generative playbook for the current agent
 * response.
 * </pre>
 *
 * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
 *
 * @return The actionTracingInfo, or the default {@code Example} instance if unset.
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.Example getActionTracingInfo() {
  return actionTracingInfo_ == null
      ? com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()
      : actionTracingInfo_;
}
/**
 *
 *
 * <pre>
 * The actions performed by the generative playbook for the current agent
 * response.
 * </pre>
 *
 * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
 */
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ExampleOrBuilder getActionTracingInfoOrBuilder() {
  return actionTracingInfo_ == null
      ? com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()
      : actionTracingInfo_;
}
// Memoized initialization state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: each playbook name is written as a separate length-delimited string.
  for (int i = 0; i < currentPlaybooks_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, currentPlaybooks_.getRaw(i));
  }
  // Field 2: only serialized when the presence bit is set.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getActionTracingInfo());
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the result; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  {
    int dataSize = 0;
    for (int i = 0; i < currentPlaybooks_.size(); i++) {
      dataSize += computeStringSizeNoTag(currentPlaybooks_.getRaw(i));
    }
    size += dataSize;
    // One 1-byte tag per repeated-string element (field number 1 fits in a single byte).
    size += 1 * getCurrentPlaybooksList().size();
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getActionTracingInfo());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo)) {
    return super.equals(obj);
  }
  com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo other =
      (com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo) obj;
  if (!getCurrentPlaybooksList().equals(other.getCurrentPlaybooksList())) return false;
  // Compare presence first; only compare values when both sides have the field set.
  if (hasActionTracingInfo() != other.hasActionTracingInfo()) return false;
  if (hasActionTracingInfo()) {
    if (!getActionTracingInfo().equals(other.getActionTracingInfo())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Cached; 0 is the sentinel for "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Fields are mixed in only when present, keeping the hash consistent with equals().
  if (getCurrentPlaybooksCount() > 0) {
    hash = (37 * hash) + CURRENT_PLAYBOOKS_FIELD_NUMBER;
    hash = (53 * hash) + getCurrentPlaybooksList().hashCode();
  }
  if (hasActionTracingInfo()) {
    hash = (37 * hash) + ACTION_TRACING_INFO_FIELD_NUMBER;
    hash = (53 * hash) + getActionTracingInfo().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per supported input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
// without an ExtensionRegistryLite. All delegate to the shared PARSER instance.
public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a new builder seeded from the immutable default instance. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a new builder pre-populated with the fields of {@code prototype}. */
public static Builder newBuilder(
    com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; any other instance is copied in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Represents the information of a query if handled by generative agent
* resources.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.GenerativeInfo}
*/
// Generated message builder for GenerativeInfo — do not edit by hand; regenerate instead.
// bitField0_ layout: bit 0 = currentPlaybooks has local modifications, bit 1 = actionTracingInfo set.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.GenerativeInfo)
    com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_GenerativeInfo_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_GenerativeInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.class,
            com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.Builder.class);
  }

  // Construct using com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // Eagerly create sub-builders only when the runtime requires field builders everywhere.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getActionTracingInfoFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    currentPlaybooks_ = com.google.protobuf.LazyStringArrayList.emptyList();
    actionTracingInfo_ = null;
    if (actionTracingInfoBuilder_ != null) {
      actionTracingInfoBuilder_.dispose();
      actionTracingInfoBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.dialogflow.cx.v3beta1.SessionProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_GenerativeInfo_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo getDefaultInstanceForType() {
    return com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo build() {
    com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo buildPartial() {
    com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo result =
        new com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies locally-modified fields into the result and translates builder presence
  // bits (from_bitField0_) into the message's presence bits (to_bitField0_).
  private void buildPartial0(com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      currentPlaybooks_.makeImmutable();
      result.currentPlaybooks_ = currentPlaybooks_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.actionTracingInfo_ =
          actionTracingInfoBuilder_ == null
              ? actionTracingInfo_
              : actionTracingInfoBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo) {
      return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo other) {
    if (other == com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo.getDefaultInstance())
      return this;
    if (!other.currentPlaybooks_.isEmpty()) {
      if (currentPlaybooks_.isEmpty()) {
        // Our list is empty: share the other message's immutable list directly.
        currentPlaybooks_ = other.currentPlaybooks_;
        bitField0_ |= 0x00000001;
      } else {
        // Otherwise append, copying into a mutable list first.
        ensureCurrentPlaybooksIsMutable();
        currentPlaybooks_.addAll(other.currentPlaybooks_);
      }
      onChanged();
    }
    if (other.hasActionTracingInfo()) {
      mergeActionTracingInfo(other.getActionTracingInfo());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // Tag = (field_number << 3) | wire_type: 10 = field 1 length-delimited,
        // 18 = field 2 length-delimited, 0 = end of stream.
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              ensureCurrentPlaybooksIsMutable();
              currentPlaybooks_.add(s);
              break;
            } // case 10
          case 18:
            {
              input.readMessage(
                  getActionTracingInfoFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private com.google.protobuf.LazyStringArrayList currentPlaybooks_ =
      com.google.protobuf.LazyStringArrayList.emptyList();

  // Copy-on-write: replaces a shared/immutable list with a private mutable copy
  // before local modification, then marks the field as locally modified.
  private void ensureCurrentPlaybooksIsMutable() {
    if (!currentPlaybooks_.isModifiable()) {
      currentPlaybooks_ = new com.google.protobuf.LazyStringArrayList(currentPlaybooks_);
    }
    bitField0_ |= 0x00000001;
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @return A list containing the currentPlaybooks.
   */
  public com.google.protobuf.ProtocolStringList getCurrentPlaybooksList() {
    // Freeze before exposing so the returned view cannot be mutated out from under us.
    currentPlaybooks_.makeImmutable();
    return currentPlaybooks_;
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @return The count of currentPlaybooks.
   */
  public int getCurrentPlaybooksCount() {
    return currentPlaybooks_.size();
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @param index The index of the element to return.
   * @return The currentPlaybooks at the given index.
   */
  public java.lang.String getCurrentPlaybooks(int index) {
    return currentPlaybooks_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the currentPlaybooks at the given index.
   */
  public com.google.protobuf.ByteString getCurrentPlaybooksBytes(int index) {
    return currentPlaybooks_.getByteString(index);
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @param index The index to set the value at.
   * @param value The currentPlaybooks to set.
   * @return This builder for chaining.
   */
  public Builder setCurrentPlaybooks(int index, java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureCurrentPlaybooksIsMutable();
    currentPlaybooks_.set(index, value);
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @param value The currentPlaybooks to add.
   * @return This builder for chaining.
   */
  public Builder addCurrentPlaybooks(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureCurrentPlaybooksIsMutable();
    currentPlaybooks_.add(value);
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @param values The currentPlaybooks to add.
   * @return This builder for chaining.
   */
  public Builder addAllCurrentPlaybooks(java.lang.Iterable<java.lang.String> values) {
    ensureCurrentPlaybooksIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, currentPlaybooks_);
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearCurrentPlaybooks() {
    currentPlaybooks_ = com.google.protobuf.LazyStringArrayList.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    ; // empty statement emitted by the code generator
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The stack of [playbooks][google.cloud.dialogflow.cx.v3beta1.Playbook] that
   * the conversation has currently entered, with the most recent one on the
   * top.
   * </pre>
   *
   * <code>repeated string current_playbooks = 1;</code>
   *
   * @param value The bytes of the currentPlaybooks to add.
   * @return This builder for chaining.
   */
  public Builder addCurrentPlaybooksBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    ensureCurrentPlaybooksIsMutable();
    currentPlaybooks_.add(value);
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Exactly one of actionTracingInfo_ / actionTracingInfoBuilder_ is authoritative:
  // the builder once getActionTracingInfoFieldBuilder() has been called, the plain
  // message field before that.
  private com.google.cloud.dialogflow.cx.v3beta1.Example actionTracingInfo_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.dialogflow.cx.v3beta1.Example,
          com.google.cloud.dialogflow.cx.v3beta1.Example.Builder,
          com.google.cloud.dialogflow.cx.v3beta1.ExampleOrBuilder>
      actionTracingInfoBuilder_;

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   *
   * @return Whether the actionTracingInfo field is set.
   */
  public boolean hasActionTracingInfo() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   *
   * @return The actionTracingInfo.
   */
  public com.google.cloud.dialogflow.cx.v3beta1.Example getActionTracingInfo() {
    if (actionTracingInfoBuilder_ == null) {
      return actionTracingInfo_ == null
          ? com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()
          : actionTracingInfo_;
    } else {
      return actionTracingInfoBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  public Builder setActionTracingInfo(com.google.cloud.dialogflow.cx.v3beta1.Example value) {
    if (actionTracingInfoBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      actionTracingInfo_ = value;
    } else {
      actionTracingInfoBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  public Builder setActionTracingInfo(
      com.google.cloud.dialogflow.cx.v3beta1.Example.Builder builderForValue) {
    if (actionTracingInfoBuilder_ == null) {
      actionTracingInfo_ = builderForValue.build();
    } else {
      actionTracingInfoBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  public Builder mergeActionTracingInfo(com.google.cloud.dialogflow.cx.v3beta1.Example value) {
    if (actionTracingInfoBuilder_ == null) {
      // Reference comparison against the default instance is intentional here:
      // only a field already holding a non-default message is merged field-by-field.
      if (((bitField0_ & 0x00000002) != 0)
          && actionTracingInfo_ != null
          && actionTracingInfo_
              != com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()) {
        getActionTracingInfoBuilder().mergeFrom(value);
      } else {
        actionTracingInfo_ = value;
      }
    } else {
      actionTracingInfoBuilder_.mergeFrom(value);
    }
    if (actionTracingInfo_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  public Builder clearActionTracingInfo() {
    bitField0_ = (bitField0_ & ~0x00000002);
    actionTracingInfo_ = null;
    if (actionTracingInfoBuilder_ != null) {
      actionTracingInfoBuilder_.dispose();
      actionTracingInfoBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  public com.google.cloud.dialogflow.cx.v3beta1.Example.Builder getActionTracingInfoBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getActionTracingInfoFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  public com.google.cloud.dialogflow.cx.v3beta1.ExampleOrBuilder getActionTracingInfoOrBuilder() {
    if (actionTracingInfoBuilder_ != null) {
      return actionTracingInfoBuilder_.getMessageOrBuilder();
    } else {
      return actionTracingInfo_ == null
          ? com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()
          : actionTracingInfo_;
    }
  }

  /**
   *
   *
   * <pre>
   * The actions performed by the generative playbook for the current agent
   * response.
   * </pre>
   *
   * <code>.google.cloud.dialogflow.cx.v3beta1.Example action_tracing_info = 2;</code>
   */
  // Lazily creates the sub-builder; from then on the plain field is nulled and the
  // builder becomes the single source of truth for this field.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.dialogflow.cx.v3beta1.Example,
          com.google.cloud.dialogflow.cx.v3beta1.Example.Builder,
          com.google.cloud.dialogflow.cx.v3beta1.ExampleOrBuilder>
      getActionTracingInfoFieldBuilder() {
    if (actionTracingInfoBuilder_ == null) {
      actionTracingInfoBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.dialogflow.cx.v3beta1.Example,
              com.google.cloud.dialogflow.cx.v3beta1.Example.Builder,
              com.google.cloud.dialogflow.cx.v3beta1.ExampleOrBuilder>(
              getActionTracingInfo(), getParentForChildren(), isClean());
      actionTracingInfo_ = null;
    }
    return actionTracingInfoBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.GenerativeInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.GenerativeInfo)
// Shared immutable default instance; also used as the sentinel in toBuilder()/mergeFrom().
private static final com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo();
}

public static com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegating to Builder.mergeFrom; partial parse results are attached to the
// thrown InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<GenerativeInfo> PARSER =
    new com.google.protobuf.AbstractParser<GenerativeInfo>() {
      @java.lang.Override
      public GenerativeInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<GenerativeInfo> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<GenerativeInfo> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.GenerativeInfo getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/kafka | 35,498 | test-common/test-common-runtime/src/main/java/org/apache/kafka/common/test/KafkaClusterTestKit.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.test;
import kafka.raft.KafkaRaftManager;
import kafka.server.BrokerServer;
import kafka.server.ControllerServer;
import kafka.server.FaultHandlerFactory;
import kafka.server.KafkaConfig;
import kafka.server.KafkaRaftServer;
import kafka.server.SharedServer;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.common.Uuid;
import org.apache.kafka.common.config.internals.BrokerSecurityConfigs;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.network.ListenerName;
import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.apache.kafka.common.test.api.TestKitDefaults;
import org.apache.kafka.common.utils.ThreadUtils;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.controller.Controller;
import org.apache.kafka.metadata.authorizer.StandardAuthorizer;
import org.apache.kafka.metadata.properties.MetaPropertiesEnsemble;
import org.apache.kafka.metadata.storage.Formatter;
import org.apache.kafka.network.SocketServerConfigs;
import org.apache.kafka.raft.DynamicVoters;
import org.apache.kafka.raft.MetadataLogConfig;
import org.apache.kafka.raft.QuorumConfig;
import org.apache.kafka.server.common.ApiMessageAndVersion;
import org.apache.kafka.server.config.KRaftConfigs;
import org.apache.kafka.server.config.ServerConfigs;
import org.apache.kafka.server.fault.FaultHandler;
import org.apache.kafka.storage.internals.log.CleanerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static org.apache.kafka.server.config.ReplicationConfigs.INTER_BROKER_LISTENER_NAME_CONFIG;
import static org.apache.kafka.server.config.ServerLogConfigs.LOG_DIRS_CONFIG;
public class KafkaClusterTestKit implements AutoCloseable {
private static final Logger log = LoggerFactory.getLogger(KafkaClusterTestKit.class);
/**
 * A {@link FaultHandlerFactory} for tests that funnels every fault into one of two
 * shared {@link MockFaultHandler}s, split by severity, so tests can assert on
 * recorded faults after the cluster shuts down.
 */
static class SimpleFaultHandlerFactory implements FaultHandlerFactory {
    private final MockFaultHandler fatalHandler = new MockFaultHandler("fatalFaultHandler");
    private final MockFaultHandler nonFatalHandler = new MockFaultHandler("nonFatalFaultHandler");

    /** Handler that collects all fatal faults. */
    MockFaultHandler fatalFaultHandler() {
        return fatalHandler;
    }

    /** Handler that collects all non-fatal faults. */
    MockFaultHandler nonFatalFaultHandler() {
        return nonFatalHandler;
    }

    @Override
    public FaultHandler build(String name, boolean fatal, Runnable action) {
        // The name and action are deliberately ignored; only severity selects the handler.
        return fatal ? fatalHandler : nonFatalHandler;
    }
}
public static class Builder {
// Topology (broker/controller node specs) this builder will materialize.
private final TestKitNodes nodes;
// Extra KafkaConfig properties applied to every node; see setConfigProp.
private final Map<String, Object> configProps = new HashMap<>();
// Routes all server faults into mock handlers so tests can assert on them.
private final SimpleFaultHandlerFactory faultHandlerFactory = new SimpleFaultHandlerFactory();
// Pre-binds listener ports so quorum voter/bootstrap strings can be built before startup.
private final PreboundSocketFactoryManager socketFactoryManager = new PreboundSocketFactoryManager();
private final String brokerListenerName;
private final String controllerListenerName;
private final String brokerSecurityProtocol;
private final String controllerSecurityProtocol;
// When true, the cluster is formatted as a single standalone controller.
private boolean standalone;
// Explicit initial KRaft voter set (node id -> directory id), if configured.
private Optional<Map<Integer, Uuid>> initialVoterSet = Optional.empty();
// Whether cluster directories are removed on close; defaults to true (see constructor).
private boolean deleteOnClose;
/**
 * Creates a cluster builder for the given topology. Listener names and security
 * protocols are snapshotted from {@code nodes}; directories are deleted on close
 * by default.
 */
public Builder(TestKitNodes nodes) {
    this.nodes = nodes;
    this.brokerListenerName = nodes.brokerListenerName().value();
    this.controllerListenerName = nodes.controllerListenerName().value();
    this.brokerSecurityProtocol = nodes.brokerListenerProtocol().name;
    this.controllerSecurityProtocol = nodes.controllerListenerProtocol().name;
    this.deleteOnClose = true;
}
/**
 * Sets a KafkaConfig property applied to every node in the cluster.
 * Later calls with the same key override earlier ones.
 *
 * @return this builder for chaining
 */
public Builder setConfigProp(String key, Object value) {
    this.configProps.put(key, value);
    return this;
}
/**
 * Configures whether the cluster is formatted as a standalone controller
 * (bootstrap-servers based quorum) instead of a static voter set.
 *
 * @return this builder for chaining
 */
public Builder setStandalone(boolean standalone) {
    this.standalone = standalone;
    return this;
}
/**
 * Sets the initial KRaft voter set, mapping controller node id to directory id.
 * The map is defensively snapshotted, so later mutation by the caller has no
 * effect on this builder.
 *
 * @param initialVoterSet non-null map with non-null keys and values
 * @return this builder for chaining
 * @throws NullPointerException if the map or any of its entries is null
 */
public Builder setInitialVoterSet(Map<Integer, Uuid> initialVoterSet) {
    // Map.copyOf takes an immutable snapshot and rejects null keys/values up front,
    // instead of silently aliasing the caller's mutable map.
    this.initialVoterSet = Optional.of(Map.copyOf(initialVoterSet));
    return this;
}
private KafkaConfig createNodeConfig(TestKitNode node) throws IOException {
TestKitNode brokerNode = nodes.brokerNodes().get(node.id());
TestKitNode controllerNode = nodes.controllerNodes().get(node.id());
Map<String, Object> props = new HashMap<>(configProps);
props.put(KRaftConfigs.SERVER_MAX_STARTUP_TIME_MS_CONFIG,
Long.toString(TimeUnit.MINUTES.toMillis(10)));
props.put(KRaftConfigs.PROCESS_ROLES_CONFIG, roles(node.id()));
props.put(KRaftConfigs.NODE_ID_CONFIG,
Integer.toString(node.id()));
// In combined mode, always prefer the metadata log directory of the controller node.
if (controllerNode != null) {
props.put(MetadataLogConfig.METADATA_LOG_DIR_CONFIG,
controllerNode.metadataDirectory());
setSecurityProtocolProps(props, controllerSecurityProtocol);
} else {
props.put(MetadataLogConfig.METADATA_LOG_DIR_CONFIG,
node.metadataDirectory());
}
if (brokerNode != null) {
// Set the log.dirs according to the broker node setting (if there is a broker node)
props.put(LOG_DIRS_CONFIG,
String.join(",", brokerNode.logDataDirectories()));
setSecurityProtocolProps(props, brokerSecurityProtocol);
} else {
// Set log.dirs equal to the metadata directory if there is just a controller.
props.put(LOG_DIRS_CONFIG,
controllerNode.metadataDirectory());
}
// We allow configuring the listeners and related properties via Builder::setConfigProp,
// and they shouldn't be overridden here
props.putIfAbsent(SocketServerConfigs.LISTENER_SECURITY_PROTOCOL_MAP_CONFIG, String.format("%s:%s,%s:%s",
brokerListenerName, brokerSecurityProtocol, controllerListenerName, controllerSecurityProtocol));
props.putIfAbsent(SocketServerConfigs.LISTENERS_CONFIG, listeners(node.id()));
props.putIfAbsent(INTER_BROKER_LISTENER_NAME_CONFIG, brokerListenerName);
props.putIfAbsent(KRaftConfigs.CONTROLLER_LISTENER_NAMES_CONFIG, controllerListenerName);
if (!standalone && initialVoterSet.isEmpty()) {
StringBuilder quorumVoterStringBuilder = new StringBuilder();
String prefix = "";
for (int nodeId : nodes.controllerNodes().keySet()) {
quorumVoterStringBuilder.append(prefix).
append(nodeId).
append("@").
append("localhost").
append(":").
append(socketFactoryManager.getOrCreatePortForListener(nodeId, controllerListenerName));
prefix = ",";
}
props.put(QuorumConfig.QUORUM_VOTERS_CONFIG, quorumVoterStringBuilder.toString());
} else {
StringBuilder bootstrapServersStringBuilder = new StringBuilder();
String prefix = "";
for (int nodeId : nodes.controllerNodes().keySet()) {
bootstrapServersStringBuilder.append(prefix).
append("localhost").
append(":").
append(socketFactoryManager.getOrCreatePortForListener(nodeId, controllerListenerName));
prefix = ",";
}
props.put(QuorumConfig.QUORUM_BOOTSTRAP_SERVERS_CONFIG, bootstrapServersStringBuilder.toString());
}
// reduce log cleaner offset map memory usage
props.putIfAbsent(CleanerConfig.LOG_CLEANER_DEDUPE_BUFFER_SIZE_PROP, "2097152");
// do not include auto join config in broker nodes
if (brokerNode != null) {
props.remove(QuorumConfig.QUORUM_AUTO_JOIN_ENABLE_CONFIG);
}
// Add associated broker node property overrides
if (brokerNode != null) {
props.putAll(brokerNode.propertyOverrides());
}
// Add associated controller node property overrides
if (controllerNode != null) {
props.putAll(controllerNode.propertyOverrides());
}
props.putIfAbsent(ServerConfigs.UNSTABLE_FEATURE_VERSIONS_ENABLE_CONFIG, "true");
props.putIfAbsent(ServerConfigs.UNSTABLE_API_VERSIONS_ENABLE_CONFIG, "true");
return new KafkaConfig(props, false);
}
private void setSecurityProtocolProps(Map<String, Object> props, String securityProtocol) {
if (securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name)) {
props.putIfAbsent(BrokerSecurityConfigs.SASL_ENABLED_MECHANISMS_CONFIG, "PLAIN");
props.putIfAbsent(BrokerSecurityConfigs.SASL_MECHANISM_INTER_BROKER_PROTOCOL_CONFIG, "PLAIN");
props.putIfAbsent(KRaftConfigs.SASL_MECHANISM_CONTROLLER_PROTOCOL_CONFIG, "PLAIN");
props.putIfAbsent(ServerConfigs.AUTHORIZER_CLASS_NAME_CONFIG, StandardAuthorizer.class.getName());
props.putIfAbsent(StandardAuthorizer.ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG, "false");
props.putIfAbsent(StandardAuthorizer.SUPER_USERS_CONFIG, "User:" + JaasUtils.KAFKA_PLAIN_ADMIN);
}
}
private Optional<File> maybeSetupJaasFile() throws Exception {
if (brokerSecurityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name)) {
File file = JaasUtils.writeJaasContextsToFile(Set.of(
new JaasUtils.JaasSection(JaasUtils.KAFKA_SERVER_CONTEXT_NAME,
List.of(
JaasModule.plainLoginModule(
JaasUtils.KAFKA_PLAIN_ADMIN,
JaasUtils.KAFKA_PLAIN_ADMIN_PASSWORD,
true,
Map.of(
JaasUtils.KAFKA_PLAIN_USER1, JaasUtils.KAFKA_PLAIN_USER1_PASSWORD,
JaasUtils.KAFKA_PLAIN_ADMIN, JaasUtils.KAFKA_PLAIN_ADMIN_PASSWORD)
)
)
)
));
JaasUtils.refreshJavaLoginConfigParam(file);
return Optional.of(file);
}
return Optional.empty();
}
        /**
         * If true (the default), the cluster's base directory is deleted when the
         * cluster is closed.
         *
         * @return this builder, for chaining
         */
        public Builder setDeleteOnClose(boolean deleteOnClose) {
            this.deleteOnClose = deleteOnClose;
            return this;
        }
        /**
         * Creates the cluster: allocates listener ports for every node, constructs
         * the shared servers, controllers, and brokers, and hands ownership of all
         * of them to the returned {@link KafkaClusterTestKit}. On failure, every
         * server created so far is shut down, the base directory is deleted, and
         * the socket factory manager is closed.
         *
         * @return the assembled (not yet formatted or started) cluster
         * @throws Exception if any server cannot be constructed
         */
        public KafkaClusterTestKit build() throws Exception {
            Map<Integer, ControllerServer> controllers = new HashMap<>();
            Map<Integer, BrokerServer> brokers = new HashMap<>();
            Map<Integer, SharedServer> jointServers = new HashMap<>();
            File baseDirectory = null;
            // NOTE(review): jaasFile is created before the try and is not deleted in
            // the catch block below — confirm whether it should be cleaned up when
            // construction fails.
            Optional<File> jaasFile = maybeSetupJaasFile();
            try {
                baseDirectory = new File(nodes.baseDirectory());
                // Reserve listener ports for every node up front so that the configs
                // built below can reference them.
                for (TestKitNode node : nodes.controllerNodes().values()) {
                    socketFactoryManager.getOrCreatePortForListener(node.id(), controllerListenerName);
                }
                for (TestKitNode node : nodes.brokerNodes().values()) {
                    socketFactoryManager.getOrCreatePortForListener(node.id(), brokerListenerName);
                }
                // Build controllers first; combined nodes register their SharedServer
                // in jointServers so the broker pass below can reuse it.
                for (TestKitNode node : nodes.controllerNodes().values()) {
                    setupNodeDirectories(baseDirectory, node.metadataDirectory(), List.of());
                    KafkaConfig config = createNodeConfig(node);
                    SharedServer sharedServer = new SharedServer(
                        config,
                        node.initialMetaPropertiesEnsemble(),
                        Time.SYSTEM,
                        new Metrics(),
                        CompletableFuture.completedFuture(QuorumConfig.parseVoterConnections(config.quorumConfig().voters())),
                        QuorumConfig.parseBootstrapServers(config.quorumConfig().bootstrapServers()),
                        faultHandlerFactory,
                        socketFactoryManager.getOrCreateSocketFactory(node.id())
                    );
                    ControllerServer controller = null;
                    try {
                        controller = new ControllerServer(
                            sharedServer,
                            KafkaRaftServer.configSchema(),
                            nodes.bootstrapMetadata());
                    } catch (Throwable e) {
                        log.error("Error creating controller {}", node.id(), e);
                        // Best-effort release of the shared server; the original error wins.
                        Utils.swallow(log, Level.WARN, "sharedServer.stopForController error", sharedServer::stopForController);
                        throw e;
                    }
                    controllers.put(node.id(), controller);
                    jointServers.put(node.id(), sharedServer);
                }
                for (TestKitNode node : nodes.brokerNodes().values()) {
                    // Combined nodes reuse the SharedServer built in the controller pass.
                    SharedServer sharedServer = jointServers.get(node.id());
                    if (sharedServer == null) {
                        KafkaConfig config = createNodeConfig(node);
                        sharedServer = new SharedServer(
                            config,
                            node.initialMetaPropertiesEnsemble(),
                            Time.SYSTEM,
                            new Metrics(),
                            CompletableFuture.completedFuture(QuorumConfig.parseVoterConnections(config.quorumConfig().voters())),
                            QuorumConfig.parseBootstrapServers(config.quorumConfig().bootstrapServers()),
                            faultHandlerFactory,
                            socketFactoryManager.getOrCreateSocketFactory(node.id())
                        );
                        jointServers.put(node.id(), sharedServer);
                    }
                    BrokerServer broker = null;
                    try {
                        broker = new BrokerServer(sharedServer);
                    } catch (Throwable e) {
                        log.error("Error creating broker {}", node.id(), e);
                        Utils.swallow(log, Level.WARN, "sharedServer.stopForBroker error", sharedServer::stopForBroker);
                        throw e;
                    }
                    brokers.put(node.id(), broker);
                }
            } catch (Exception e) {
                // Roll back everything created so far, then rethrow the original error.
                for (BrokerServer brokerServer : brokers.values()) {
                    brokerServer.shutdown();
                }
                for (ControllerServer controller : controllers.values()) {
                    controller.shutdown();
                }
                if (baseDirectory != null) {
                    Utils.delete(baseDirectory);
                }
                socketFactoryManager.close();
                throw e;
            }
            return new KafkaClusterTestKit(
                nodes,
                controllers,
                brokers,
                baseDirectory,
                faultHandlerFactory,
                socketFactoryManager,
                jaasFile,
                standalone,
                initialVoterSet,
                deleteOnClose);
        }
private String listeners(int node) {
if (nodes.isCombined(node)) {
return String.format("%s://localhost:0,%s://localhost:0", brokerListenerName, controllerListenerName);
}
if (nodes.controllerNodes().containsKey(node)) {
return String.format("%s://localhost:0", controllerListenerName);
}
return String.format("%s://localhost:0", brokerListenerName);
}
private String roles(int node) {
if (nodes.isCombined(node)) {
return "broker,controller";
}
if (nodes.controllerNodes().containsKey(node)) {
return "controller";
}
return "broker";
}
private static void setupNodeDirectories(File baseDirectory,
String metadataDirectory,
Collection<String> logDataDirectories) throws Exception {
Files.createDirectories(new File(baseDirectory, "local").toPath());
Files.createDirectories(Paths.get(metadataDirectory));
for (String logDataDirectory : logDataDirectories) {
Files.createDirectories(Paths.get(logDataDirectory));
}
}
}
    private static final String KAFKA_CLUSTER_THREAD_PREFIX = "kafka-cluster-test-kit-";
    // Runs broker/controller format, startup, and shutdown tasks in parallel.
    private final ExecutorService executorService;
    // Names/tracks threads created by this kit so close() can wait for them to exit.
    private final KafkaClusterThreadFactory threadFactory = new KafkaClusterThreadFactory(KAFKA_CLUSTER_THREAD_PREFIX);
    // The cluster topology this kit was built from.
    private final TestKitNodes nodes;
    // Controllers and brokers, keyed by node id (combined nodes appear in both).
    private final Map<Integer, ControllerServer> controllers;
    private final Map<Integer, BrokerServer> brokers;
    // Root directory holding all node directories; deleted on close when deleteOnClose is set.
    private final File baseDirectory;
    private final SimpleFaultHandlerFactory faultHandlerFactory;
    // Owns the pre-bound listener ports/sockets; closed with the cluster.
    private final PreboundSocketFactoryManager socketFactoryManager;
    private final String controllerListenerName;
    // JAAS configuration file; present only when SASL is enabled.
    private final Optional<File> jaasFile;
    // True when the quorum is formatted as a single standalone controller.
    private final boolean standalone;
    // Explicit initial voter set (controller id -> metadata directory id), if provided.
    private final Optional<Map<Integer, Uuid>> initialVoterSet;
    private final boolean deleteOnClose;
    /**
     * Private constructor; instances are created via {@link Builder#build()}.
     * Takes ownership of the supplied servers, socket factory manager, base
     * directory, and (optional) JAAS file.
     */
    private KafkaClusterTestKit(
        TestKitNodes nodes,
        Map<Integer, ControllerServer> controllers,
        Map<Integer, BrokerServer> brokers,
        File baseDirectory,
        SimpleFaultHandlerFactory faultHandlerFactory,
        PreboundSocketFactoryManager socketFactoryManager,
        Optional<File> jaasFile,
        boolean standalone,
        Optional<Map<Integer, Uuid>> initialVoterSet,
        boolean deleteOnClose
    ) {
        /*
        Number of threads = Total number of brokers + Total number of controllers + Total number of Raft Managers
                          = Total number of brokers + Total number of controllers * 2
                            (Raft Manager per broker/controller)
        */
        int numOfExecutorThreads = (nodes.brokerNodes().size() + nodes.controllerNodes().size()) * 2;
        this.executorService = Executors.newFixedThreadPool(numOfExecutorThreads, threadFactory);
        this.nodes = nodes;
        this.controllers = controllers;
        this.brokers = brokers;
        this.baseDirectory = baseDirectory;
        this.faultHandlerFactory = faultHandlerFactory;
        this.socketFactoryManager = socketFactoryManager;
        this.controllerListenerName = nodes.controllerListenerName().value();
        this.jaasFile = jaasFile;
        this.standalone = standalone;
        this.initialVoterSet = initialVoterSet;
        this.deleteOnClose = deleteOnClose;
    }
    /**
     * Formats the storage directories of every node in parallel.
     *
     * <p>Controllers always format their metadata directory; for a combined node
     * the broker pass skips the metadata directory (second argument false) since
     * the controller pass already wrote it.
     *
     * @throws Exception if any node fails to format; remaining tasks are cancelled
     */
    public void format() throws Exception {
        List<Future<?>> futures = new ArrayList<>();
        try {
            for (ControllerServer controller : controllers.values()) {
                futures.add(executorService.submit(() -> formatNode(controller.sharedServer().metaPropsEnsemble(), true)));
            }
            for (Entry<Integer, BrokerServer> entry : brokers.entrySet()) {
                BrokerServer broker = entry.getValue();
                // Only write the metadata directory here if the node is NOT combined.
                futures.add(executorService.submit(() -> formatNode(broker.sharedServer().metaPropsEnsemble(),
                        !nodes.isCombined(nodes().brokerNodes().get(entry.getKey()).id()))));
            }
            for (Future<?> future: futures) {
                future.get();
            }
        } catch (Exception e) {
            for (Future<?> future: futures) {
                future.cancel(true);
            }
            throw e;
        }
    }
    /**
     * Formats the storage directories of a single node using its
     * {@link MetaPropertiesEnsemble}.
     *
     * @param ensemble               the node's metadata properties (node id,
     *                               cluster id, log dirs, metadata log dir)
     * @param writeMetadataDirectory whether to format the metadata log directory;
     *                               false for the broker half of a combined node,
     *                               whose metadata dir was formatted already
     * @throws RuntimeException wrapping any failure, tagged with the node id
     */
    private void formatNode(
        MetaPropertiesEnsemble ensemble,
        boolean writeMetadataDirectory
    ) {
        try {
            final var nodeId = ensemble.nodeId().getAsInt();
            Formatter formatter = new Formatter();
            formatter.setNodeId(nodeId);
            formatter.setClusterId(ensemble.clusterId().get());
            if (writeMetadataDirectory) {
                formatter.setDirectories(ensemble.logDirProps().keySet());
            } else {
                // Exclude the metadata log dir; it was formatted by the controller pass.
                formatter.setDirectories(ensemble.logDirProps().keySet().stream().
                    filter(d -> !ensemble.metadataLogDir().get().equals(d)).
                    collect(Collectors.toSet()));
            }
            if (formatter.directories().isEmpty()) {
                // Nothing left to format for this node.
                return;
            }
            formatter.setReleaseVersion(nodes.bootstrapMetadata().metadataVersion());
            formatter.setUnstableFeatureVersionsEnabled(true);
            formatter.setIgnoreFormatted(false);
            formatter.setControllerListenerName(controllerListenerName);
            if (writeMetadataDirectory) {
                formatter.setMetadataLogDirectory(ensemble.metadataLogDir().get());
            } else {
                formatter.setMetadataLogDirectory(Optional.empty());
            }
            StringBuilder dynamicVotersBuilder = new StringBuilder();
            String prefix = "";
            if (standalone) {
                // Standalone: only the first combined node seeds the initial controller;
                // all quorum members are dynamic.
                if (nodeId == TestKitDefaults.BROKER_ID_OFFSET + TestKitDefaults.CONTROLLER_ID_OFFSET) {
                    final var controllerNode = nodes.controllerNodes().get(nodeId);
                    dynamicVotersBuilder.append(
                        String.format(
                            "%d@localhost:%d:%s",
                            controllerNode.id(),
                            socketFactoryManager.
                                getOrCreatePortForListener(controllerNode.id(), controllerListenerName),
                            controllerNode.metadataDirectoryId()
                        )
                    );
                    formatter.setInitialControllers(DynamicVoters.parse(dynamicVotersBuilder.toString()));
                }
                // when the nodeId != TestKitDefaults.CONTROLLER_ID_OFFSET, the node is formatting with
                // the --no-initial-controllers flag
                formatter.setHasDynamicQuorum(true);
            } else if (initialVoterSet.isPresent()) {
                // Explicit initial voter set: format every node with the full
                // "id@localhost:port:directoryId" voter list.
                for (final var controllerNode : initialVoterSet.get().entrySet()) {
                    final var voterId = controllerNode.getKey();
                    final var voterDirectoryId = controllerNode.getValue();
                    dynamicVotersBuilder.append(prefix);
                    prefix = ",";
                    dynamicVotersBuilder.append(
                        String.format(
                            "%d@localhost:%d:%s",
                            voterId,
                            socketFactoryManager.
                                getOrCreatePortForListener(voterId, controllerListenerName),
                            voterDirectoryId
                        )
                    );
                }
                formatter.setInitialControllers(DynamicVoters.parse(dynamicVotersBuilder.toString()));
                formatter.setHasDynamicQuorum(true);
            }
            formatter.run();
        } catch (Exception e) {
            throw new RuntimeException("Failed to format node " + ensemble.nodeId(), e);
        }
    }
    /**
     * Starts every controller and broker in parallel, waiting for all of them to
     * finish starting.
     *
     * @throws ExecutionException   if any server fails to start; remaining startup
     *                              tasks are cancelled
     * @throws InterruptedException if interrupted while waiting
     */
    public void startup() throws ExecutionException, InterruptedException {
        List<Future<?>> futures = new ArrayList<>();
        try {
            // Note the startup order here is chosen to be consistent with
            // `KafkaRaftServer`. See comments in that class for an explanation.
            for (ControllerServer controller : controllers.values()) {
                futures.add(executorService.submit(controller::startup));
            }
            for (BrokerServer broker : brokers.values()) {
                futures.add(executorService.submit(broker::startup));
            }
            for (Future<?> future: futures) {
                future.get();
            }
        } catch (Exception e) {
            for (Future<?> future: futures) {
                future.cancel(true);
            }
            throw e;
        }
    }
/**
* Wait for a controller to mark all the brokers as ready (registered and unfenced).
* And also wait for the metadata cache up-to-date in each broker server.
*/
public void waitForReadyBrokers() throws ExecutionException, InterruptedException {
// We can choose any controller, not just the active controller.
// If we choose a standby controller, we will wait slightly longer.
ControllerServer controllerServer = controllers.values().iterator().next();
Controller controller = controllerServer.controller();
controller.waitForReadyBrokers(brokers.size()).get();
// make sure metadata cache in each broker server is up-to-date
TestUtils.waitForCondition(() ->
brokers.values().stream().map(BrokerServer::metadataCache)
.allMatch(cache -> brokers.values().stream().map(b -> b.config().brokerId()).allMatch(cache::hasAliveBroker)),
"Failed to wait for publisher to publish the metadata update to each broker.");
}
public class ClientPropertiesBuilder {
private final Properties properties;
private boolean usingBootstrapControllers = false;
public ClientPropertiesBuilder() {
this.properties = new Properties();
}
public ClientPropertiesBuilder(Properties properties) {
this.properties = properties;
}
public ClientPropertiesBuilder setUsingBootstrapControllers(boolean usingBootstrapControllers) {
this.usingBootstrapControllers = usingBootstrapControllers;
return this;
}
public Properties build() {
if (usingBootstrapControllers) {
properties.setProperty(AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG, bootstrapControllers());
properties.remove(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
} else {
properties.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers());
properties.remove(AdminClientConfig.BOOTSTRAP_CONTROLLERS_CONFIG);
}
return properties;
}
}
    /** Creates a client-properties builder seeded with the given properties. */
    public ClientPropertiesBuilder newClientPropertiesBuilder(Properties properties) {
        return new ClientPropertiesBuilder(properties);
    }
    /** Creates an empty client-properties builder. */
    public ClientPropertiesBuilder newClientPropertiesBuilder() {
        return new ClientPropertiesBuilder();
    }
    /** Returns default client properties pointing at this cluster's brokers. */
    public Properties clientProperties() {
        return new ClientPropertiesBuilder().build();
    }
public String bootstrapServers() {
StringBuilder bld = new StringBuilder();
String prefix = "";
for (Entry<Integer, BrokerServer> entry : brokers.entrySet()) {
int brokerId = entry.getKey();
BrokerServer broker = entry.getValue();
ListenerName listenerName = nodes.brokerListenerName();
// The KafkaConfig#listeners method normalizes the listener name.
// The result from TestKitNodes#brokerListenerName method should be normalized as well,
// so that it matches the listener name in the KafkaConfig.
int port = broker.boundPort(ListenerName.normalised(listenerName.value()));
if (port <= 0) {
throw new RuntimeException("Broker " + brokerId + " does not yet " +
"have a bound port for " + listenerName + ". Did you start " +
"the cluster yet?");
}
bld.append(prefix).append("localhost:").append(port);
prefix = ",";
}
return bld.toString();
}
public String bootstrapControllers() {
StringBuilder bld = new StringBuilder();
String prefix = "";
for (Entry<Integer, ControllerServer> entry : controllers.entrySet()) {
int id = entry.getKey();
ControllerServer controller = entry.getValue();
ListenerName listenerName = nodes.controllerListenerName();
// Although the KafkaConfig#listeners method normalizes the listener name,
// the controller.listener.names configuration does not allow lowercase input,
// so there is no lowercase controller listener name, and we don't need to normalize it.
int port = controller.socketServer().boundPort(listenerName);
if (port <= 0) {
throw new RuntimeException("Controller " + id + " does not yet " +
"have a bound port for " + listenerName + ". Did you start " +
"the cluster yet?");
}
bld.append(prefix).append("localhost:").append(port);
prefix = ",";
}
return bld.toString();
}
    /** Returns the controllers in this cluster, keyed by node id. */
    public Map<Integer, ControllerServer> controllers() {
        return controllers;
    }
public Controller waitForActiveController() throws InterruptedException {
AtomicReference<Controller> active = new AtomicReference<>(null);
TestUtils.waitForCondition(() -> {
for (ControllerServer controllerServer : controllers.values()) {
if (controllerServer.controller().isActive()) {
active.set(controllerServer.controller());
}
}
return active.get() != null;
}, 60_000, "Controller not active");
return active.get();
}
    /** Returns the brokers in this cluster, keyed by node id. */
    public Map<Integer, BrokerServer> brokers() {
        return brokers;
    }
    /**
     * Returns every node's Raft manager, keyed by node id. For combined nodes the
     * broker's shared server is used (both roles share one manager), so the
     * controller loop only adds ids not already present.
     */
    public Map<Integer, KafkaRaftManager<ApiMessageAndVersion>> raftManagers() {
        Map<Integer, KafkaRaftManager<ApiMessageAndVersion>> results = new HashMap<>();
        for (BrokerServer brokerServer : brokers().values()) {
            results.put(brokerServer.config().brokerId(), brokerServer.sharedServer().raftManager());
        }
        for (ControllerServer controllerServer : controllers().values()) {
            if (!results.containsKey(controllerServer.config().nodeId())) {
                results.put(controllerServer.config().nodeId(), controllerServer.sharedServer().raftManager());
            }
        }
        return results;
    }
    /** Returns the cluster topology this kit was built from. */
    public TestKitNodes nodes() {
        return nodes;
    }
    /** Returns the shared fatal fault handler (rethrown on close). */
    public MockFaultHandler fatalFaultHandler() {
        return faultHandlerFactory.fatalFaultHandler();
    }
    /** Returns the shared non-fatal fault handler (rethrown on close). */
    public MockFaultHandler nonFatalFaultHandler() {
        return faultHandlerFactory.nonFatalFaultHandler();
    }
    /**
     * Shuts the cluster down: brokers first, then controllers (mirroring
     * KafkaRaftServer's ordering), optionally deletes the base directory and JAAS
     * file, releases the executor and pre-bound sockets, waits for all kit threads
     * to exit, and finally rethrows the first fatal/non-fatal fault recorded
     * during the run, if any.
     */
    @Override
    public void close() throws Exception {
        List<Entry<String, Future<?>>> futureEntries = new ArrayList<>();
        try {
            // Note the shutdown order here is chosen to be consistent with
            // `KafkaRaftServer`. See comments in that class for an explanation.
            for (Entry<Integer, BrokerServer> entry : brokers.entrySet()) {
                int brokerId = entry.getKey();
                BrokerServer broker = entry.getValue();
                futureEntries.add(new SimpleImmutableEntry<>("broker" + brokerId,
                    executorService.submit((Runnable) broker::shutdown)));
            }
            waitForAllFutures(futureEntries);
            futureEntries.clear();
            for (Entry<Integer, ControllerServer> entry : controllers.entrySet()) {
                int controllerId = entry.getKey();
                ControllerServer controller = entry.getValue();
                futureEntries.add(new SimpleImmutableEntry<>("controller" + controllerId,
                    executorService.submit(controller::shutdown)));
            }
            waitForAllFutures(futureEntries);
            futureEntries.clear();
            if (deleteOnClose) {
                Utils.delete(baseDirectory);
                if (jaasFile.isPresent()) {
                    Utils.delete(jaasFile.get());
                }
            }
        } catch (Exception e) {
            // Cancel whatever is still pending, but always run the finally cleanup.
            for (Entry<String, Future<?>> entry : futureEntries) {
                entry.getValue().cancel(true);
            }
            throw e;
        } finally {
            ThreadUtils.shutdownExecutorServiceQuietly(executorService, 5, TimeUnit.MINUTES);
            socketFactoryManager.close();
        }
        waitForAllThreads();
        faultHandlerFactory.fatalFaultHandler().maybeRethrowFirstException();
        faultHandlerFactory.nonFatalFaultHandler().maybeRethrowFirstException();
    }
    /**
     * Blocks on each named shutdown future in order, logging progress. Propagates
     * the first failure.
     */
    private void waitForAllFutures(List<Entry<String, Future<?>>> futureEntries)
            throws Exception {
        for (Entry<String, Future<?>> entry : futureEntries) {
            log.debug("waiting for {} to shut down.", entry.getKey());
            entry.getValue().get();
            log.debug("{} successfully shut down.", entry.getKey());
        }
    }
private void waitForAllThreads() throws InterruptedException {
TestUtils.waitForCondition(() -> Thread.getAllStackTraces().keySet()
.stream().noneMatch(t -> threadFactory.getThreadIds().contains(t.getId())),
"Failed to wait for all threads to shut down.");
}
}
|
apache/tapestry-5 | 35,310 | tapestry-core/src/main/java/org/apache/tapestry5/test/TapestryTestCase.java | // Copyright 2006-2014 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.test;
import org.apache.tapestry5.*;
import org.apache.tapestry5.annotations.Id;
import org.apache.tapestry5.annotations.Parameter;
import org.apache.tapestry5.annotations.Path;
import org.apache.tapestry5.beanmodel.BeanModel;
import org.apache.tapestry5.beanmodel.PropertyConduit;
import org.apache.tapestry5.beanmodel.PropertyModel;
import org.apache.tapestry5.beanmodel.services.*;
import org.apache.tapestry5.commons.*;
import org.apache.tapestry5.commons.util.CollectionFactory;
import org.apache.tapestry5.http.Link;
import org.apache.tapestry5.http.services.BaseURLSource;
import org.apache.tapestry5.http.services.Context;
import org.apache.tapestry5.http.services.HttpServletRequestHandler;
import org.apache.tapestry5.http.services.Request;
import org.apache.tapestry5.http.services.RequestGlobals;
import org.apache.tapestry5.http.services.RequestHandler;
import org.apache.tapestry5.http.services.Response;
import org.apache.tapestry5.http.services.Session;
import org.apache.tapestry5.internal.services.MapMessages;
import org.apache.tapestry5.internal.services.MarkupWriterImpl;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.ioc.internal.util.InternalUtils;
import org.apache.tapestry5.ioc.test.IOCTestCase;
import org.apache.tapestry5.model.ComponentModel;
import org.apache.tapestry5.model.EmbeddedComponentModel;
import org.apache.tapestry5.model.MutableComponentModel;
import org.apache.tapestry5.model.ParameterModel;
import org.apache.tapestry5.runtime.Component;
import org.apache.tapestry5.services.ApplicationStateCreator;
import org.apache.tapestry5.services.ApplicationStateManager;
import org.apache.tapestry5.services.ApplicationStatePersistenceStrategy;
import org.apache.tapestry5.services.ApplicationStatePersistenceStrategySource;
import org.apache.tapestry5.services.AssetFactory;
import org.apache.tapestry5.services.AssetSource;
import org.apache.tapestry5.services.BindingFactory;
import org.apache.tapestry5.services.BindingSource;
import org.apache.tapestry5.services.ClasspathAssetAliasManager;
import org.apache.tapestry5.services.ComponentClassResolver;
import org.apache.tapestry5.services.ComponentEventRequestHandler;
import org.apache.tapestry5.services.ComponentEventResultProcessor;
import org.apache.tapestry5.services.ComponentRequestHandler;
import org.apache.tapestry5.services.Environment;
import org.apache.tapestry5.services.FieldTranslatorSource;
import org.apache.tapestry5.services.FieldValidatorSource;
import org.apache.tapestry5.services.FormSupport;
import org.apache.tapestry5.services.Heartbeat;
import org.apache.tapestry5.services.Html5Support;
import org.apache.tapestry5.services.MetaDataLocator;
import org.apache.tapestry5.services.PageRenderLinkSource;
import org.apache.tapestry5.services.ResourceDigestGenerator;
import org.apache.tapestry5.services.TranslatorSource;
import org.apache.tapestry5.services.ValidationConstraintGenerator;
import org.apache.tapestry5.services.ValueEncoderSource;
import org.apache.tapestry5.services.javascript.JavaScriptSupport;
import org.easymock.IAnswer;
import jakarta.servlet.ServletOutputStream;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
/**
* Base test case that adds a number of convenience factory and training methods for the public
* interfaces of
* Tapestry.
*/
@SuppressWarnings("all")
public abstract class TapestryTestCase extends IOCTestCase
{
    /**
     * Creates a new markup writer instance (not a markup writer mock). Output can be directed at
     * the writer, which uses
     * the default (HTML) markup model. The writer's toString() value represents all the collected
     * markup in the
     * writer.
     */
    protected final MarkupWriter createMarkupWriter()
    {
        return new MarkupWriterImpl();
    }

    // ------------------------------------------------------------------
    // Mock factories: each mockXxx() method creates an EasyMock mock of the
    // named Tapestry interface/annotation via newMock() (inherited from
    // IOCTestCase). Pair them with the train_xxx() methods below to record
    // expectations before replay().
    // ------------------------------------------------------------------

    protected final ApplicationStateCreator mockApplicationStateCreator()
    {
        return newMock(ApplicationStateCreator.class);
    }
    protected final ApplicationStatePersistenceStrategy mockApplicationStatePersistenceStrategy()
    {
        return newMock(ApplicationStatePersistenceStrategy.class);
    }
    protected final ApplicationStatePersistenceStrategySource mockApplicationStatePersistenceStrategySource()
    {
        return newMock(ApplicationStatePersistenceStrategySource.class);
    }
    protected final Asset mockAsset()
    {
        return newMock(Asset.class);
    }
    protected final AssetFactory mockAssetFactory()
    {
        return newMock(AssetFactory.class);
    }
    protected final AssetSource mockAssetSource()
    {
        return newMock(AssetSource.class);
    }
    protected final Binding mockBinding()
    {
        return newMock(Binding.class);
    }
    protected final BindingFactory mockBindingFactory()
    {
        return newMock(BindingFactory.class);
    }
    protected final BindingSource mockBindingSource()
    {
        return newMock(BindingSource.class);
    }
    protected final Block mockBlock()
    {
        return newMock(Block.class);
    }
    protected final ClasspathAssetAliasManager mockClasspathAssetAliasManager()
    {
        return newMock(ClasspathAssetAliasManager.class);
    }
    protected final Component mockComponent()
    {
        return newMock(Component.class);
    }
    protected final ComponentClassResolver mockComponentClassResolver()
    {
        return newMock(ComponentClassResolver.class);
    }
    protected final ComponentEventCallback mockComponentEventHandler()
    {
        return newMock(ComponentEventCallback.class);
    }
    protected final ComponentModel mockComponentModel()
    {
        return newMock(ComponentModel.class);
    }
    protected final ComponentResources mockComponentResources()
    {
        return newMock(ComponentResources.class);
    }
    protected final Context mockContext()
    {
        return newMock(Context.class);
    }
    protected final Environment mockEnvironment()
    {
        return newMock(Environment.class);
    }
    protected final Field mockField()
    {
        return newMock(Field.class);
    }
    protected final Html5Support mockHtml5Support()
    {
        return newMock(Html5Support.class);
    }
    protected final FieldValidator mockFieldValidator()
    {
        return newMock(FieldValidator.class);
    }
    // NOTE(review): not final, unlike the sibling factory methods — confirm
    // whether subclasses are expected to override this one.
    protected FieldValidatorSource mockFieldValidatorSource()
    {
        return newMock(FieldValidatorSource.class);
    }
    // Convenience factory: a Field mock pre-trained to return the given label.
    protected final Field mockFieldWithLabel(String label)
    {
        Field field = mockField();
        train_getLabel(field, label);
        return field;
    }
    // Further mock factories (see the banner comment pattern used above): each
    // returns an EasyMock mock created via newMock().
    protected final Heartbeat mockHeartbeat()
    {
        return newMock(Heartbeat.class);
    }
    protected final HttpServletRequest mockHttpServletRequest()
    {
        return newMock(HttpServletRequest.class);
    }
    protected final HttpServletResponse mockHttpServletResponse()
    {
        return newMock(HttpServletResponse.class);
    }
    protected final HttpSession mockHttpSession()
    {
        return newMock(HttpSession.class);
    }
    protected final Inject mockInject()
    {
        return newMock(Inject.class);
    }
    protected final Link mockLink()
    {
        return newMock(Link.class);
    }
    protected final MarkupWriter mockMarkupWriter()
    {
        return newMock(MarkupWriter.class);
    }
    protected final MutableComponentModel mockMutableComponentModel()
    {
        return newMock(MutableComponentModel.class);
    }
    protected final ParameterModel mockParameterModel()
    {
        return newMock(ParameterModel.class);
    }
    protected final Path mockPath()
    {
        return newMock(Path.class);
    }
    protected final PropertyConduit mockPropertyConduit()
    {
        return newMock(PropertyConduit.class);
    }
    protected final PropertyModel mockPropertyModel()
    {
        return newMock(PropertyModel.class);
    }
    protected final Request mockRequest()
    {
        return newMock(Request.class);
    }
    protected final RequestHandler mockRequestHandler()
    {
        return newMock(RequestHandler.class);
    }
    protected final Response mockResponse()
    {
        return newMock(Response.class);
    }
    protected final Session mockSession()
    {
        return newMock(Session.class);
    }
    protected final Translator mockTranslator()
    {
        return newMock(Translator.class);
    }
    protected final ValidationConstraintGenerator mockValidationConstraintGenerator()
    {
        return newMock(ValidationConstraintGenerator.class);
    }
    protected final ValidationTracker mockValidationTracker()
    {
        return newMock(ValidationTracker.class);
    }
    protected final Validator mockValidator()
    {
        return newMock(Validator.class);
    }
    // ------------------------------------------------------------------
    // Trainer methods: each train_xxx() records an EasyMock expectation on an
    // already-created mock (expect(...).andReturn(...)). Expectations suffixed
    // with atLeastOnce() allow the trained call to be invoked repeatedly.
    // ------------------------------------------------------------------
    protected final void train_buildConstraints(ValidationConstraintGenerator generator, Class propertyType,
            AnnotationProvider provider, String... constraints)
    {
        expect(generator.buildConstraints(propertyType, provider)).andReturn(Arrays.asList(constraints));
    }
    protected final <T> void train_create(ApplicationStateCreator<T> creator, T aso)
    {
        expect(creator.create()).andReturn(aso);
    }
    protected final void train_createAsset(AssetFactory factory, Resource resource, Asset asset)
    {
        expect(factory.createAsset(resource)).andReturn(asset);
    }
    protected final void train_createValidator(FieldValidatorSource source, Field field, String validatorType,
            String constraintValue, String overrideId, Messages overrideMessages, Locale locale, FieldValidator result)
    {
        expect(source.createValidator(field, validatorType, constraintValue, overrideId, overrideMessages, locale))
                .andReturn(result);
    }
    protected final void train_encodeRedirectURL(Response response, String URI, String encoded)
    {
        expect(response.encodeRedirectURL(URI)).andReturn(encoded);
    }
    protected final void train_encodeURL(Response response, String inputURL, String outputURL)
    {
        expect(response.encodeURL(inputURL)).andReturn(outputURL);
    }
    protected final <T> void train_exists(ApplicationStatePersistenceStrategy strategy, Class<T> asoClass,
            boolean exists)
    {
        expect(strategy.exists(asoClass)).andReturn(exists);
    }
    protected final void train_getAsset(AssetSource source, Resource root, String path, Locale locale, Asset asset)
    {
        expect(source.getAsset(root, path, locale)).andReturn(asset);
    }
    protected final void train_generateChecksum(ResourceDigestGenerator generator, URL url, String digest)
    {
        expect(generator.generateDigest(url)).andReturn(digest);
    }
    protected final <T> void train_get(ApplicationStatePersistenceStrategy strategy, Class<T> asoClass,
            ApplicationStateCreator<T> creator, T aso)
    {
        expect(strategy.get(asoClass, creator)).andReturn(aso);
    }
    protected final void train_get(ApplicationStatePersistenceStrategySource source, String strategyName,
            ApplicationStatePersistenceStrategy strategy)
    {
        expect(source.get(strategyName)).andReturn(strategy).atLeastOnce();
    }
    protected final void train_get(Binding binding, Object value)
    {
        expect(binding.get()).andReturn(value);
    }
    protected final <T> void train_getIfExists(ApplicationStatePersistenceStrategy strategy, Class<T> asoClass,
            T aso)
    {
        expect(strategy.getIfExists(asoClass)).andReturn(aso);
    }
    // NOTE(review): not final, unlike the sibling trainer methods — confirm
    // whether subclasses are expected to override this one.
    protected void train_getAttribute(HttpSession session, String attributeName, Object value)
    {
        expect(session.getAttribute(attributeName)).andReturn(value);
    }
    protected final void train_getAttribute(Session session, String name, Object attribute)
    {
        expect(session.getAttribute(name)).andReturn(attribute);
    }
    protected final void train_getAttributeNames(Session session, String prefix, String... names)
    {
        expect(session.getAttributeNames(prefix)).andReturn(Arrays.asList(names));
    }
    protected final void train_getBaseResource(ComponentModel model, Resource resource)
    {
        expect(model.getBaseResource()).andReturn(resource).atLeastOnce();
    }
    protected final void train_getClasspathAsset(AssetSource source, String path, Asset asset)
    {
        expect(source.getClasspathAsset(path)).andReturn(asset);
    }
    protected final void train_getClasspathAsset(AssetSource source, String path, Locale locale, Asset asset)
    {
        expect(source.getClasspathAsset(path, locale)).andReturn(asset);
    }
    protected final void train_getCompleteId(ComponentResourcesCommon resources, String completeId)
    {
        expect(resources.getCompleteId()).andReturn(completeId).atLeastOnce();
    }
    protected final void train_getComponent(ComponentResources resources, Component component)
    {
        expect(resources.getComponent()).andReturn(component).atLeastOnce();
    }
    protected final void train_getComponentClassName(ComponentModel model, String className)
    {
        expect(model.getComponentClassName()).andReturn(className).atLeastOnce();
    }
    protected final void train_getComponentResources(Component component, ComponentResources resources)
    {
        expect(component.getComponentResources()).andReturn(resources).atLeastOnce();
    }
    protected final void train_getConduit(PropertyModel model, PropertyConduit conduit)
    {
        expect(model.getConduit()).andReturn(conduit).atLeastOnce();
    }
    protected <C, T> void train_getConstraintType(Validator<C, T> validator, Class<C> constraintType)
    {
        expect(validator.getConstraintType()).andReturn(constraintType).atLeastOnce();
    }
    protected final void train_getContainer(ComponentResources resources, Component container)
    {
        expect(resources.getContainer()).andReturn(container).atLeastOnce();
    }
    protected final void train_getContainerMessages(ComponentResources resources, Messages containerMessages)
    {
        expect(resources.getContainerMessages()).andReturn(containerMessages).atLeastOnce();
    }
protected final void train_getContainerResources(ComponentResources resources, ComponentResources containerResources)
{
expect(resources.getContainerResources()).andReturn(containerResources).atLeastOnce();
}
protected final void train_getDateHeader(Request request, String name, long value)
{
expect(request.getDateHeader(name)).andReturn(value).atLeastOnce();
}
protected final void train_getFieldPersistenceStrategy(ComponentModel model, String fieldName, String fieldStrategy)
{
expect(model.getFieldPersistenceStrategy(fieldName)).andReturn(fieldStrategy).atLeastOnce();
}
protected final void train_getId(ComponentResources resources, String id)
{
expect(resources.getId()).andReturn(id).atLeastOnce();
}
protected final void train_getLabel(Field field, String label)
{
expect(field.getLabel()).andReturn(label).atLeastOnce();
}
protected final void train_getLocale(ComponentResourcesCommon resources, Locale locale)
{
expect(resources.getLocale()).andReturn(locale).atLeastOnce();
}
protected final void train_getLocale(Request request, Locale locale)
{
expect(request.getLocale()).andReturn(locale).atLeastOnce();
}
protected void train_getMessageKey(Validator validator, String messageKey)
{
expect(validator.getMessageKey()).andReturn(messageKey).atLeastOnce();
}
protected final void train_getMessages(ComponentResources resources, Messages messages)
{
expect(resources.getMessages()).andReturn(messages).atLeastOnce();
}
protected final void train_getMeta(ComponentModel model, String key, String value)
{
expect(model.getMeta(key)).andReturn(value).atLeastOnce();
}
/**
 * Trains the mock {@code HttpServletResponse} to expect one call to
 * {@code getOutputStream()} and return the given stream.
 * The checked {@code IOException} declared by the servlet API can not actually be
 * thrown while recording an expectation, so it is converted into a test failure.
 */
protected final void train_getOutputStream(HttpServletResponse response, ServletOutputStream stream)
{
try
{
expect(response.getOutputStream()).andReturn(stream);
} catch (IOException e)
{
// Recording against a mock should never perform real I/O; treat as a test bug.
fail(e.getMessage(), e);
}
}
protected final void train_getPage(ComponentResources resources, Component page)
{
expect(resources.getPage()).andReturn(page).atLeastOnce();
}
protected final void train_getParameterModel(ComponentModel model, String parameterName,
ParameterModel parameterModel)
{
expect(model.getParameterModel(parameterName)).andReturn(parameterModel).atLeastOnce();
}
protected final void train_getParameterNames(ComponentModel model, String... names)
{
expect(model.getParameterNames()).andReturn(Arrays.asList(names));
}
protected final void train_getParentModel(ComponentModel model, ComponentModel parentModel)
{
expect(model.getParentModel()).andReturn(parentModel).atLeastOnce();
}
protected final void train_getPath(Request request, String path)
{
expect(request.getPath()).andReturn(path).atLeastOnce();
}
protected final void train_getMethod(Request request, String method)
{
expect(request.getMethod()).andReturn(method).atLeastOnce();
}
protected final void train_getPersistentFieldNames(ComponentModel model, String... names)
{
expect(model.getPersistentFieldNames()).andReturn(Arrays.asList(names)).atLeastOnce();
}
protected final void train_getRootResource(AssetFactory factory, Resource rootResource)
{
expect(factory.getRootResource()).andReturn(rootResource);
}
protected final void train_getSession(HttpServletRequest request, boolean create, HttpSession session)
{
expect(request.getSession(create)).andReturn(session);
}
protected void train_getSession(Request request, boolean create, Session session)
{
expect(request.getSession(create)).andReturn(session);
}
protected final void train_getSupportsInformalParameters(ComponentModel model, boolean supports)
{
expect(model.getSupportsInformalParameters()).andReturn(supports).atLeastOnce();
}
protected final void train_getValueType(Validator validator, Class valueType)
{
expect(validator.getValueType()).andReturn(valueType).atLeastOnce();
}
@SuppressWarnings("unchecked")
protected final void train_handleResult(ComponentEventCallback handler, Object result, boolean abort)
{
expect(handler.handleResult(result)).andReturn(abort);
}
protected final void train_inError(ValidationTracker tracker, Field field, boolean inError)
{
expect(tracker.inError(field)).andReturn(inError);
}
protected final void train_isRequired(Validator validator, boolean isRequired)
{
expect(validator.isRequired()).andReturn(isRequired).atLeastOnce();
}
protected final void train_isInvariant(Binding binding, boolean isInvariant)
{
expect(binding.isInvariant()).andReturn(isInvariant);
}
protected final void train_isRequired(ParameterModel model, boolean isRequired)
{
expect(model.isRequired()).andReturn(isRequired);
}
protected final void train_isRootClass(MutableComponentModel model, boolean isRootClass)
{
expect(model.isRootClass()).andReturn(isRootClass);
}
protected final void train_name(Parameter parameter, String name)
{
expect(parameter.name()).andReturn(name).atLeastOnce();
}
protected final void train_newBinding(BindingFactory factory, String description, ComponentResources container,
ComponentResources component, String expression, Location l, Binding binding)
{
expect(factory.newBinding(description, container, component, expression, l)).andReturn(binding);
}
protected void train_newBinding(BindingSource bindingSource, String description,
ComponentResources componentResources, String defaultBindingPrefix, String expression, Binding binding)
{
expect(bindingSource.newBinding(description, componentResources, defaultBindingPrefix, expression)).andReturn(
binding);
}
protected final <T> void train_peek(Environment env, Class<T> type, T value)
{
expect(env.peek(type)).andReturn(value);
}
protected final <T> void train_peekRequired(Environment env, Class<T> type, T value)
{
expect(env.peekRequired(type)).andReturn(value);
}
/**
 * Trains the mock {@code ComponentResources} to expect one call to
 * {@code renderInformalParameters(writer)}. When the expectation is replayed,
 * the registered {@code IAnswer} side effect writes the supplied name/value
 * pairs as attributes via {@code writer.attributes(informals)}, simulating
 * what a real component would render.
 */
@SuppressWarnings("unchecked")
protected final void train_renderInformalParameters(ComponentResources resources, final MarkupWriter writer,
final Object... informals)
{
resources.renderInformalParameters(writer);
IAnswer answer = new IAnswer()
{
public Object answer() throws Throwable
{
// Side effect executed at replay time, not at record time.
writer.attributes(informals);
return null;
}
};
setAnswer(answer);
}
protected final void train_service(RequestHandler handler, Request request, Response response, boolean result)
throws IOException
{
expect(handler.service(request, response)).andReturn(result);
}
protected final void train_setContentLength(HttpServletResponse response, int length)
{
response.setContentLength(length);
}
protected final void train_setContentType(HttpServletResponse response, String contentType)
{
response.setContentType(contentType);
}
protected final void train_setDateHeader(HttpServletResponse response, String headerName, long date)
{
response.setDateHeader(headerName, date);
}
/**
* @deprecated Deprecated in 5.4 with no replacement.
*/
protected final void train_toClientURL(Asset asset, String URL)
{
expect(asset.toClientURL()).andReturn(URL).atLeastOnce();
}
protected final void train_toRedirectURI(Link link, String URI)
{
expect(link.toRedirectURI()).andReturn(URI).atLeastOnce();
}
protected final void train_value(Id annotation, String value)
{
expect(annotation.value()).andReturn(value).atLeastOnce();
}
protected final void train_value(Path annotation, String value)
{
expect(annotation.value()).andReturn(value).atLeastOnce();
}
protected final void train_getBoundType(ComponentResources resources, String parameterName, Class type)
{
expect(resources.getBoundType(parameterName)).andReturn(type);
}
protected final BeanModel mockBeanModel()
{
return newMock(BeanModel.class);
}
protected final BeanModelSource mockBeanModelSource()
{
return newMock(BeanModelSource.class);
}
public final void train_getLocation(Locatable locatable, Location location)
{
expect(locatable.getLocation()).andReturn(location).atLeastOnce();
}
public final void train_getResource(Location location, Resource resource)
{
expect(location.getResource()).andReturn(resource).atLeastOnce();
}
public final void train_getLine(Location location, int line)
{
expect(location.getLine()).andReturn(line).atLeastOnce();
}
protected final void train_getParameter(Request request, String elementName, String value)
{
expect(request.getParameter(elementName)).andReturn(value).atLeastOnce();
}
protected final void train_getPageName(ComponentResourcesCommon resources, String pageName)
{
expect(resources.getPageName()).andReturn(pageName).atLeastOnce();
}
protected final FormSupport mockFormSupport()
{
return newMock(FormSupport.class);
}
/**
 * Provides access to component messages, suitable for testing. Reads the associated
 * .properties file for the class (NOT any localization of it). Only the messages
 * directly in the .properties file are available.
 *
 * @param componentClass component class whose messages are needed
 * @return the Messages instance (case-insensitive lookup, English locale)
 * @throws IOException if the .properties file exists but cannot be read
 * @throws RuntimeException if the class has no associated message catalog
 */
protected final Messages messagesFor(Class componentClass) throws IOException
{
    String file = componentClass.getSimpleName() + ".properties";
    Properties properties = new Properties();
    // try-with-resources replaces the manual try/finally + InternalUtils.close() idiom;
    // a null resource is skipped by the construct, so the missing-catalog check is safe.
    try (InputStream is = componentClass.getResourceAsStream(file))
    {
        if (is == null)
            throw new RuntimeException(String.format("Class %s does not have a message catalog.",
                    componentClass.getName()));
        properties.load(is);
    }
    Map<String, String> map = CollectionFactory.newCaseInsensitiveMap();
    for (Object key : properties.keySet())
    {
        String skey = (String) key;
        map.put(skey, properties.getProperty(skey));
    }
    return new MapMessages(Locale.ENGLISH, map);
}
protected final FieldValidationSupport mockFieldValidationSupport()
{
return newMock(FieldValidationSupport.class);
}
protected final void train_getInheritInformalParameters(EmbeddedComponentModel model, boolean inherits)
{
expect(model.getInheritInformalParameters()).andReturn(inherits).atLeastOnce();
}
protected final ApplicationStateManager mockApplicationStateManager()
{
return newMock(ApplicationStateManager.class);
}
protected final <T> void train_get(ApplicationStateManager manager, Class<T> asoClass, T aso)
{
expect(manager.get(asoClass)).andReturn(aso);
}
protected final void train_getInput(ValidationTracker tracker, Field field, String input)
{
expect(tracker.getInput(field)).andReturn(input);
}
protected final void train_isXHR(Request request, boolean isXHR)
{
expect(request.isXHR()).andReturn(isXHR).atLeastOnce();
}
protected void train_getPathInfo(HttpServletRequest request, String pathInfo)
{
expect(request.getPathInfo()).andReturn(pathInfo).atLeastOnce();
}
protected final void train_service(HttpServletRequestHandler handler, HttpServletRequest request,
HttpServletResponse response, boolean result) throws IOException
{
expect(handler.service(request, response)).andReturn(result);
}
protected final void train_getServletPath(HttpServletRequest request, String path)
{
expect(request.getServletPath()).andReturn(path).atLeastOnce();
}
protected final HttpServletRequestHandler mockHttpServletRequestHandler()
{
return newMock(HttpServletRequestHandler.class);
}
protected final NullFieldStrategy mockNullFieldStrategy()
{
return newMock(NullFieldStrategy.class);
}
protected final ValueEncoderSource mockValueEncoderSource()
{
return newMock(ValueEncoderSource.class);
}
protected final ValueEncoder mockValueEncoder()
{
return newMock(ValueEncoder.class);
}
protected final void train_toClient(ValueEncoder valueEncoder, Object value, String encoded)
{
expect(valueEncoder.toClient(value)).andReturn(encoded);
}
protected final void train_getValueEncoder(ValueEncoderSource source, Class type, ValueEncoder valueEncoder)
{
expect(source.getValueEncoder(type)).andReturn(valueEncoder).atLeastOnce();
}
protected final void train_toValue(ValueEncoder valueEncoder, String clientValue, Object value)
{
expect(valueEncoder.toValue(clientValue)).andReturn(value);
}
protected <T> void train_findMeta(MetaDataLocator locator, String key, ComponentResources resources,
Class<T> expectedType, T value)
{
expect(locator.findMeta(key, resources, expectedType)).andReturn(value).atLeastOnce();
}
protected MetaDataLocator mockMetaDataLocator()
{
return newMock(MetaDataLocator.class);
}
protected final void train_isSecure(Request request, boolean isSecure)
{
expect(request.isSecure()).andReturn(isSecure).atLeastOnce();
}
protected final void train_getBaseURL(BaseURLSource baseURLSource, boolean secure, String baseURL)
{
expect(baseURLSource.getBaseURL(secure)).andReturn(baseURL);
}
protected final BaseURLSource mockBaseURLSource()
{
return newMock(BaseURLSource.class);
}
/**
 * Trains the mock {@code Request} to expect one call to
 * {@code getAttribute(attributeName)} and return the given value.
 *
 * @param request mock request to train
 * @param attributeName name of the request attribute (parameter was misspelled "attibuteName")
 * @param value value the mock should return
 */
protected final void train_getAttribute(Request request, String attributeName, Object value)
{
    expect(request.getAttribute(attributeName)).andReturn(value);
}
protected final void train_getBlockParameter(ComponentResources resources, String name, Block block)
{
expect(resources.getBlockParameter(name)).andReturn(block).atLeastOnce();
}
protected final PropertyOverrides mockPropertyOverrides()
{
return newMock(PropertyOverrides.class);
}
protected void train_getOverrideBlock(PropertyOverrides overrides, String name, Block block)
{
expect(overrides.getOverrideBlock(name)).andReturn(block).atLeastOnce();
}
protected final void train_getOverrideMessages(PropertyOverrides overrides, Messages messages)
{
expect(overrides.getOverrideMessages()).andReturn(messages);
}
protected final void train_isDisabled(Field field, boolean disabled)
{
expect(field.isDisabled()).andReturn(disabled);
}
protected final ValidationDecorator mockValidationDecorator()
{
return newMock(ValidationDecorator.class);
}
protected final void train_isRequired(Field field, boolean required)
{
expect(field.isRequired()).andReturn(required);
}
protected final void train_getClientId(ClientElement element, String clientId)
{
expect(element.getClientId()).andReturn(clientId);
}
protected final FieldTranslator mockFieldTranslator()
{
return newMock(FieldTranslator.class);
}
/**
 * Creates a mock {@code Translator} pre-trained to report the given name and type
 * (both via at-least-once expectations).
 */
protected final Translator mockTranslator(String name, Class type)
{
Translator translator = mockTranslator();
train_getName(translator, name);
train_getType(translator, type);
return translator;
}
protected final void train_getName(Translator translator, String name)
{
expect(translator.getName()).andReturn(name).atLeastOnce();
}
protected final void train_getType(Translator translator, Class type)
{
expect(translator.getType()).andReturn(type).atLeastOnce();
}
protected final void train_createDefaultTranslator(FieldTranslatorSource source, ComponentResources resources,
String parameterName, FieldTranslator translator)
{
expect(source.createDefaultTranslator(resources, parameterName)).andReturn(translator);
}
protected final TranslatorSource mockTranslatorSource()
{
return newMock(TranslatorSource.class);
}
protected final void train_get(TranslatorSource translatorSource, String name, Translator translator)
{
expect(translatorSource.get(name)).andReturn(translator).atLeastOnce();
}
protected final void train_getMessageKey(Translator translator, String messageKey)
{
expect(translator.getMessageKey()).andReturn(messageKey).atLeastOnce();
}
protected final void train_findByType(TranslatorSource ts, Class propertyType, Translator translator)
{
expect(ts.findByType(propertyType)).andReturn(translator);
}
protected final void train_toURI(Link link, String URI)
{
expect(link.toURI()).andReturn(URI);
}
protected final void train_createEditModel(BeanModelSource source, Class beanClass, Messages messages,
BeanModel model)
{
expect(source.createEditModel(beanClass, messages)).andReturn(model);
}
protected final ComponentEventResultProcessor mockComponentEventResultProcessor()
{
return newMock(ComponentEventResultProcessor.class);
}
protected final void train_getFormComponentId(FormSupport formSupport, String componentId)
{
expect(formSupport.getFormComponentId()).andReturn(componentId).atLeastOnce();
}
protected final void train_getFormValidationId(FormSupport formSupport, String validationId)
{
expect(formSupport.getFormValidationId()).andReturn(validationId).atLeastOnce();
}
protected final void train_isAllowNull(ParameterModel model, boolean allowNull)
{
expect(model.isAllowNull()).andReturn(allowNull).atLeastOnce();
}
protected final void train_isInvalidated(Session session, boolean invalidated)
{
expect(session.isInvalidated()).andReturn(invalidated);
}
protected final ComponentEventRequestHandler mockComponentEventRequestHandler()
{
return newMock(ComponentEventRequestHandler.class);
}
protected final ComponentRequestHandler mockComponentRequestHandler()
{
return newMock(ComponentRequestHandler.class);
}
/**
* @since 5.2.0
*/
protected final RequestGlobals mockRequestGlobals()
{
return newMock(RequestGlobals.class);
}
protected final PageRenderLinkSource mockPageRenderLinkSource()
{
return newMock(PageRenderLinkSource.class);
}
protected final JavaScriptSupport mockJavaScriptSupport()
{
return newMock(JavaScriptSupport.class);
}
/**
 * Creates a mock {@code Asset} pre-trained to return the given client URL
 * from {@code toClientURL()} (at-least-once expectation).
 */
protected final Asset mockAsset(String assetURL)
{
Asset asset = mockAsset();
train_toClientURL(asset, assetURL);
return asset;
}
/**
 * Creates a mock {@code Link} pre-trained to return the given absolute URI
 * from {@code toURI()} (at-least-once expectation).
 */
protected final Link mockLink(String absoluteURI)
{
Link link = mockLink();
expect(link.toURI()).andReturn(absoluteURI).atLeastOnce();
return link;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.complexscripts.util;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.fop.util.CharUtilities;
/**
* <p>Script related utilities.</p>
*
* <p>This work was originally authored by Glenn Adams (gadams@apache.org).</p>
*/
public final class CharScript {
// CSOFF: LineLength
//
// The following script codes are based on ISO 15924. Codes less than 1000 are
// official assignments from 15924; those equal to or greater than 1000 are FOP
// implementation specific.
//
/** hebrew script constant */
public static final int SCRIPT_HEBREW = 125; // 'hebr'
/** mongolian script constant */
public static final int SCRIPT_MONGOLIAN = 145; // 'mong'
/** arabic script constant */
public static final int SCRIPT_ARABIC = 160; // 'arab'
/** greek script constant */
public static final int SCRIPT_GREEK = 200; // 'grek'
/** latin script constant */
public static final int SCRIPT_LATIN = 215; // 'latn'
/** cyrillic script constant */
public static final int SCRIPT_CYRILLIC = 220; // 'cyrl'
/** georgian script constant */
public static final int SCRIPT_GEORGIAN = 240; // 'geor'
/** bopomofo script constant */
public static final int SCRIPT_BOPOMOFO = 285; // 'bopo'
/** hangul script constant */
public static final int SCRIPT_HANGUL = 286; // 'hang'
/** gurmukhi script constant */
public static final int SCRIPT_GURMUKHI = 310; // 'guru'
/** gurmukhi 2 script constant */
public static final int SCRIPT_GURMUKHI_2 = 1310; // 'gur2' -- MSFT (pseudo) script tag for variant shaping semantics
/** devanagari script constant */
public static final int SCRIPT_DEVANAGARI = 315; // 'deva'
/** devanagari 2 script constant */
public static final int SCRIPT_DEVANAGARI_2 = 1315; // 'dev2' -- MSFT (pseudo) script tag for variant shaping semantics
/** gujarati script constant */
public static final int SCRIPT_GUJARATI = 320; // 'gujr'
/** gujarati 2 script constant */
public static final int SCRIPT_GUJARATI_2 = 1320; // 'gjr2' -- MSFT (pseudo) script tag for variant shaping semantics
/** bengali script constant */
public static final int SCRIPT_BENGALI = 326; // 'beng'
/** bengali 2 script constant */
public static final int SCRIPT_BENGALI_2 = 1326; // 'bng2' -- MSFT (pseudo) script tag for variant shaping semantics
/** oriya script constant */
public static final int SCRIPT_ORIYA = 327; // 'orya'
/** oriya 2 script constant */
public static final int SCRIPT_ORIYA_2 = 1327; // 'ory2' -- MSFT (pseudo) script tag for variant shaping semantics
/** tibetan script constant */
public static final int SCRIPT_TIBETAN = 330; // 'tibt'
/** telugu script constant */
public static final int SCRIPT_TELUGU = 340; // 'telu'
/** telugu 2 script constant */
public static final int SCRIPT_TELUGU_2 = 1340; // 'tel2' -- MSFT (pseudo) script tag for variant shaping semantics
/** kannada script constant */
public static final int SCRIPT_KANNADA = 345; // 'knda'
/** kannada 2 script constant */
public static final int SCRIPT_KANNADA_2 = 1345; // 'knd2' -- MSFT (pseudo) script tag for variant shaping semantics
/** tamil script constant */
public static final int SCRIPT_TAMIL = 346; // 'taml'
/** tamil 2 script constant */
public static final int SCRIPT_TAMIL_2 = 1346; // 'tml2' -- MSFT (pseudo) script tag for variant shaping semantics
/** malayalam script constant */
public static final int SCRIPT_MALAYALAM = 347; // 'mlym'
/** malayalam 2 script constant */
public static final int SCRIPT_MALAYALAM_2 = 1347; // 'mlm2' -- MSFT (pseudo) script tag for variant shaping semantics
/** sinhalese script constant */
public static final int SCRIPT_SINHALESE = 348; // 'sinh'
/** burmese script constant */
public static final int SCRIPT_BURMESE = 350; // 'mymr'
/** thai script constant */
public static final int SCRIPT_THAI = 352; // 'thai'
/** khmer script constant */
public static final int SCRIPT_KHMER = 355; // 'khmr'
/** lao script constant */
public static final int SCRIPT_LAO = 356; // 'laoo'
/** hiragana script constant */
public static final int SCRIPT_HIRAGANA = 410; // 'hira'
/** ethiopic script constant */
public static final int SCRIPT_ETHIOPIC = 430; // 'ethi'
/** han script constant */
public static final int SCRIPT_HAN = 500; // 'hani'
/** katakana script constant */
public static final int SCRIPT_KATAKANA = 411; // 'kana' -- ISO 15924 assigns 411 to Katakana; 410 is Hiragana (fixes collision with SCRIPT_HIRAGANA)
/** math script constant */
public static final int SCRIPT_MATH = 995; // 'zmth'
/** symbol script constant */
public static final int SCRIPT_SYMBOL = 996; // 'zsym'
/** undetermined script constant */
public static final int SCRIPT_UNDETERMINED = 998; // 'zyyy'
/** uncoded script constant */
public static final int SCRIPT_UNCODED = 999; // 'zzzz'
/**
* A static (class) parameter indicating whether V2 indic shaping
* rules apply or not, with default being <code>true</code>.
*/
private static final boolean USE_V2_INDIC = true;
private CharScript() {
}
/**
 * Determine if character c is punctuation.
 * @param c a character represented as a unicode scalar value
 * @return true if character is punctuation
 */
public static boolean isPunctuation(int c) {
    // [TBD] - coverage is not complete
    return ((c >= 0x0021) && (c <= 0x002F))   // basic latin punctuation
        || ((c >= 0x003A) && (c <= 0x0040))   // basic latin punctuation
        || ((c >= 0x005F) && (c <= 0x0060))   // basic latin punctuation
        || (c == 0x007E)                      // basic latin punctuation (tilde)
        || ((c >= 0x00A1) && (c <= 0x00BF))   // latin supplement punctuation
        || (c == 0x00D7)                      // latin supplement punctuation
        || (c == 0x00F7)                      // latin supplement punctuation
        || ((c >= 0x2000) && (c <= 0x206F));  // general punctuation
}
/**
 * Determine if character c is a digit.
 * @param c a character represented as a unicode scalar value
 * @return true if character is a digit
 */
public static boolean isDigit(int c) {
    // [TBD] - only basic latin digits are recognized
    return (c >= 0x0030) && (c <= 0x0039);
}
/**
 * Determine if character c belong to the hebrew script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to hebrew script
 */
public static boolean isHebrew(int c) {
    return ((c >= 0x0590) && (c <= 0x05FF))   // hebrew block
        || ((c >= 0xFB00) && (c <= 0xFB4F));  // hebrew presentation forms block
}
/**
 * Determine if character c belong to the mongolian script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to mongolian script
 */
public static boolean isMongolian(int c) {
    return (c >= 0x1800) && (c <= 0x18AF); // mongolian block
}
/**
 * Determine if character c belong to the arabic script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to arabic script
 */
public static boolean isArabic(int c) {
    return ((c >= 0x0600) && (c <= 0x06FF))   // arabic block
        || ((c >= 0x0750) && (c <= 0x077F))   // arabic supplement block
        || ((c >= 0xFB50) && (c <= 0xFDFF))   // arabic presentation forms a block
        || ((c >= 0xFE70) && (c <= 0xFEFF));  // arabic presentation forms b block
}
/**
 * Determine if character c belong to the greek script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to greek script
 */
public static boolean isGreek(int c) {
    return ((c >= 0x0370) && (c <= 0x03FF))   // greek (and coptic) block
        || ((c >= 0x1F00) && (c <= 0x1FFF));  // greek extended block
}
/**
 * Determine if character c belong to the latin script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to latin script
 */
public static boolean isLatin(int c) {
    return ((c >= 0x0041) && (c <= 0x005A))   // basic latin upper case
        || ((c >= 0x0061) && (c <= 0x007A))   // basic latin lower case
        || ((c >= 0x00C0) && (c <= 0x00D6))   // latin supplement upper case
        || ((c >= 0x00D8) && (c <= 0x00DF))   // latin supplement upper case
        || ((c >= 0x00E0) && (c <= 0x00F6))   // latin supplement lower case
        || ((c >= 0x00F8) && (c <= 0x00FF))   // latin supplement lower case
        || ((c >= 0x0100) && (c <= 0x017F))   // latin extended a
        || ((c >= 0x0180) && (c <= 0x024F))   // latin extended b
        || ((c >= 0x1E00) && (c <= 0x1EFF))   // latin extended additional
        || ((c >= 0x2C60) && (c <= 0x2C7F))   // latin extended c
        || ((c >= 0xA720) && (c <= 0xA7FF))   // latin extended d
        || ((c >= 0xFB00) && (c <= 0xFB0F));  // latin ligatures
}
/**
 * Determine if character c belong to the cyrillic script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to cyrillic script
 */
public static boolean isCyrillic(int c) {
    return ((c >= 0x0400) && (c <= 0x04FF))   // cyrillic block
        || ((c >= 0x0500) && (c <= 0x052F))   // cyrillic supplement block
        || ((c >= 0x2DE0) && (c <= 0x2DFF))   // cyrillic extended-a block
        || ((c >= 0xA640) && (c <= 0xA69F));  // cyrillic extended-b block
}
/**
 * Determine if character c belong to the georgian script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to georgian script
 */
public static boolean isGeorgian(int c) {
    return ((c >= 0x10A0) && (c <= 0x10FF))   // georgian block
        || ((c >= 0x2D00) && (c <= 0x2D2F));  // georgian supplement block
}
/**
 * Determine if character c belong to the hangul script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to hangul script
 */
public static boolean isHangul(int c) {
    return ((c >= 0x1100) && (c <= 0x11FF))   // hangul jamo
        || ((c >= 0x3130) && (c <= 0x318F))   // hangul compatibility jamo
        || ((c >= 0xA960) && (c <= 0xA97F))   // hangul jamo extended-a
        || ((c >= 0xAC00) && (c <= 0xD7A3))   // hangul syllables
        || ((c >= 0xD7B0) && (c <= 0xD7FF));  // hangul jamo extended-b (original comment said "extended a")
}
/**
 * Determine if character c belong to the gurmukhi script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to gurmukhi script
 */
public static boolean isGurmukhi(int c) {
    return (c >= 0x0A00) && (c <= 0x0A7F); // gurmukhi block
}
/**
 * Determine if character c belong to the devanagari script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to devanagari script
 */
public static boolean isDevanagari(int c) {
    return ((c >= 0x0900) && (c <= 0x097F))   // devangari block
        || ((c >= 0xA8E0) && (c <= 0xA8FF));  // devangari extended block
}
/**
 * Determine if character c belong to the gujarati script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to gujarati script
 */
public static boolean isGujarati(int c) {
    return (c >= 0x0A80) && (c <= 0x0AFF); // gujarati block
}
/**
 * Determine if character c belong to the bengali script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to bengali script
 */
public static boolean isBengali(int c) {
    return (c >= 0x0980) && (c <= 0x09FF); // bengali block
}
/**
 * Determine if character c belong to the oriya script.
 * @param c a character represented as a unicode scalar value
 * @return true if character belongs to oriya script
 */
public static boolean isOriya(int c) {
    return (c >= 0x0B00) && (c <= 0x0B7F); // oriya block
}
/**
* Determine if character c belong to the tibetan script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to tibetan script
*/
public static boolean isTibetan(int c) {
if ((c >= 0x0F00) && (c <= 0x0FFF)) { // tibetan block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the telugu script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to telugu script
*/
public static boolean isTelugu(int c) {
if ((c >= 0x0C00) && (c <= 0x0C7F)) { // telugu block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the kannada script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to kannada script
*/
public static boolean isKannada(int c) {
if ((c >= 0x0C00) && (c <= 0x0C7F)) { // kannada block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the tamil script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to tamil script
*/
public static boolean isTamil(int c) {
if ((c >= 0x0B80) && (c <= 0x0BFF)) { // tamil block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the malayalam script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to malayalam script
*/
public static boolean isMalayalam(int c) {
if ((c >= 0x0D00) && (c <= 0x0D7F)) { // malayalam block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the sinhalese script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to sinhalese script
*/
public static boolean isSinhalese(int c) {
if ((c >= 0x0D80) && (c <= 0x0DFF)) { // sinhala block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the burmese script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to burmese script
*/
public static boolean isBurmese(int c) {
if ((c >= 0x1000) && (c <= 0x109F)) { // burmese (myanmar) block
return true;
} else if ((c >= 0xAA60) && (c <= 0xAA7F)) { // burmese (myanmar) extended block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the thai script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to thai script
*/
public static boolean isThai(int c) {
if ((c >= 0x0E00) && (c <= 0x0E7F)) { // thai block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the khmer script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to khmer script
*/
public static boolean isKhmer(int c) {
if ((c >= 0x1780) && (c <= 0x17FF)) { // khmer block
return true;
} else if ((c >= 0x19E0) && (c <= 0x19FF)) { // khmer symbols block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the lao script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to lao script
*/
public static boolean isLao(int c) {
if ((c >= 0x0E80) && (c <= 0x0EFF)) { // lao block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the ethiopic (amharic) script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to ethiopic (amharic) script
*/
public static boolean isEthiopic(int c) {
if ((c >= 0x1200) && (c <= 0x137F)) { // ethiopic block
return true;
} else if ((c >= 0x1380) && (c <= 0x139F)) { // ethoipic supplement block
return true;
} else if ((c >= 0x2D80) && (c <= 0x2DDF)) { // ethoipic extended block
return true;
} else if ((c >= 0xAB00) && (c <= 0xAB2F)) { // ethoipic extended-a block
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the han (unified cjk) script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to han (unified cjk) script
*/
public static boolean isHan(int c) {
if ((c >= 0x3400) && (c <= 0x4DBF)) {
return true; // cjk unified ideographs extension a
} else if ((c >= 0x4E00) && (c <= 0x9FFF)) {
return true; // cjk unified ideographs
} else if ((c >= 0xF900) && (c <= 0xFAFF)) {
return true; // cjk compatibility ideographs
} else if ((c >= 0x20000) && (c <= 0x2A6DF)) {
return true; // cjk unified ideographs extension b
} else if ((c >= 0x2A700) && (c <= 0x2B73F)) {
return true; // cjk unified ideographs extension c
} else if ((c >= 0x2F800) && (c <= 0x2FA1F)) {
return true; // cjk compatibility ideographs supplement
} else {
return false;
}
}
/**
* Determine if character c belong to the bopomofo script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to bopomofo script
*/
public static boolean isBopomofo(int c) {
if ((c >= 0x3100) && (c <= 0x312F)) {
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the hiragana script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to hiragana script
*/
public static boolean isHiragana(int c) {
if ((c >= 0x3040) && (c <= 0x309F)) {
return true;
} else {
return false;
}
}
/**
* Determine if character c belong to the katakana script.
* @param c a character represented as a unicode scalar value
* @return true if character belongs to katakana script
*/
public static boolean isKatakana(int c) {
if ((c >= 0x30A0) && (c <= 0x30FF)) {
return true;
} else if ((c >= 0x31F0) && (c <= 0x31FF)) {
return true;
} else {
return false;
}
}
    /**
     * Obtain ISO15924 numeric script code of character. If script is not or cannot be determined,
     * then the script code 998 ('zyyy') is returned.
     *
     * <p>The argument is treated as a unicode scalar value; callers iterating a UTF-16
     * {@code CharSequence} should decode surrogate pairs before calling, otherwise characters
     * outside the basic multilingual plane (e.g. han extension b) cannot match.</p>
     *
     * <p>Dispatch order is significant: spaces, punctuation, and digits are deliberately
     * classified as undetermined before any script-range tests run.</p>
     * @param c the character to obtain script
     * @return an ISO15924 script code
     */
    public static int scriptOf(int c) { // [TBD] - needs optimization!!!
        // script-neutral characters first
        if (CharUtilities.isAnySpace(c)) {
            return SCRIPT_UNDETERMINED;
        } else if (isPunctuation(c)) {
            return SCRIPT_UNDETERMINED;
        } else if (isDigit(c)) {
            return SCRIPT_UNDETERMINED;
        } else if (isLatin(c)) {
            return SCRIPT_LATIN;
        } else if (isCyrillic(c)) {
            return SCRIPT_CYRILLIC;
        } else if (isGreek(c)) {
            return SCRIPT_GREEK;
        } else if (isHan(c)) {
            return SCRIPT_HAN;
        } else if (isBopomofo(c)) {
            return SCRIPT_BOPOMOFO;
        } else if (isKatakana(c)) {
            return SCRIPT_KATAKANA;
        } else if (isHiragana(c)) {
            return SCRIPT_HIRAGANA;
        } else if (isHangul(c)) {
            return SCRIPT_HANGUL;
        } else if (isArabic(c)) {
            return SCRIPT_ARABIC;
        } else if (isHebrew(c)) {
            return SCRIPT_HEBREW;
        } else if (isMongolian(c)) {
            return SCRIPT_MONGOLIAN;
        } else if (isGeorgian(c)) {
            return SCRIPT_GEORGIAN;
        // indic scripts below are mapped to their v2 variants when v2 rules
        // apply; see useV2IndicRules(int)
        } else if (isGurmukhi(c)) {
            return useV2IndicRules(SCRIPT_GURMUKHI);
        } else if (isDevanagari(c)) {
            return useV2IndicRules(SCRIPT_DEVANAGARI);
        } else if (isGujarati(c)) {
            return useV2IndicRules(SCRIPT_GUJARATI);
        } else if (isBengali(c)) {
            return useV2IndicRules(SCRIPT_BENGALI);
        } else if (isOriya(c)) {
            return useV2IndicRules(SCRIPT_ORIYA);
        } else if (isTibetan(c)) {
            return SCRIPT_TIBETAN;
        } else if (isTelugu(c)) {
            return useV2IndicRules(SCRIPT_TELUGU);
        } else if (isKannada(c)) {
            return useV2IndicRules(SCRIPT_KANNADA);
        } else if (isTamil(c)) {
            return useV2IndicRules(SCRIPT_TAMIL);
        } else if (isMalayalam(c)) {
            return useV2IndicRules(SCRIPT_MALAYALAM);
        } else if (isSinhalese(c)) {
            return SCRIPT_SINHALESE;
        } else if (isBurmese(c)) {
            return SCRIPT_BURMESE;
        } else if (isThai(c)) {
            return SCRIPT_THAI;
        } else if (isKhmer(c)) {
            return SCRIPT_KHMER;
        } else if (isLao(c)) {
            return SCRIPT_LAO;
        } else if (isEthiopic(c)) {
            return SCRIPT_ETHIOPIC;
        } else {
            // no known block matched
            return SCRIPT_UNDETERMINED;
        }
    }
/**
* Obtain the V2 indic script code corresponding to V1 indic script code SC if
* and only iff V2 indic rules apply; otherwise return SC.
* @param sc a V1 indic script code
* @return either SC or the V2 flavor of SC if V2 indic rules apply
*/
public static int useV2IndicRules(int sc) {
if (USE_V2_INDIC) {
return (sc < 1000) ? (sc + 1000) : sc;
} else {
return sc;
}
}
/**
* Obtain the script codes of each character in a character sequence. If script
* is not or cannot be determined for some character, then the script code 998
* ('zyyy') is returned.
* @param cs the character sequence
* @return a (possibly empty) array of script codes
*/
public static int[] scriptsOf(CharSequence cs) {
Set s = new HashSet();
for (int i = 0, n = cs.length(); i < n; i++) {
s.add(scriptOf(cs.charAt(i)));
}
int[] sa = new int [ s.size() ];
int ns = 0;
for (Object value : s) {
sa[ns++] = (Integer) value;
}
Arrays.sort(sa);
return sa;
}
/**
* Determine the dominant script of a character sequence.
* @param cs the character sequence
* @return the dominant script or SCRIPT_UNDETERMINED
*/
public static int dominantScript(CharSequence cs) {
Map m = new HashMap();
for (int i = 0, n = cs.length(); i < n; i++) {
int c = cs.charAt(i);
int s = scriptOf(c);
Integer k = s;
Integer v = (Integer) m.get(k);
if (v != null) {
m.put(k, v + 1);
} else {
m.put(k, 0);
}
}
int sMax = -1;
int cMax = -1;
for (Object o : m.entrySet()) {
Map.Entry e = (Map.Entry) o;
Integer k = (Integer) e.getKey();
int s = k;
switch (s) {
case SCRIPT_UNDETERMINED:
case SCRIPT_UNCODED:
break;
default:
Integer v = (Integer) e.getValue();
assert v != null;
int c = v;
if (c > cMax) {
cMax = c;
sMax = s;
}
break;
}
}
if (sMax < 0) {
sMax = SCRIPT_UNDETERMINED;
}
return sMax;
}
    /**
     * Determine if script tag denotes an 'Indic' script, where a
     * script is an 'Indic' script if it is intended to be processed by
     * the generic 'Indic' Script Processor.
     * @param script a script tag
     * @return true if script tag is a designated 'Indic' script
     */
    public static boolean isIndicScript(String script) {
        // resolve the iso 15924 tag to an internal code, then test the code
        return isIndicScript(scriptCodeFromTag(script));
    }
    /**
     * Determine if script tag denotes an 'Indic' script, where a
     * script is an 'Indic' script if it is intended to be processed by
     * the generic 'Indic' Script Processor.
     * @param script a script code
     * @return true if script code is a designated 'Indic' script
     */
    public static boolean isIndicScript(int script) {
        switch (script) {
        // each "*_2" constant is the v2 variant of the same script; see
        // useV2IndicRules(int). burmese and khmer are listed without v2
        // variants.
        case SCRIPT_BENGALI:
        case SCRIPT_BENGALI_2:
        case SCRIPT_BURMESE:
        case SCRIPT_DEVANAGARI:
        case SCRIPT_DEVANAGARI_2:
        case SCRIPT_GUJARATI:
        case SCRIPT_GUJARATI_2:
        case SCRIPT_GURMUKHI:
        case SCRIPT_GURMUKHI_2:
        case SCRIPT_KANNADA:
        case SCRIPT_KANNADA_2:
        case SCRIPT_MALAYALAM:
        case SCRIPT_MALAYALAM_2:
        case SCRIPT_ORIYA:
        case SCRIPT_ORIYA_2:
        case SCRIPT_TAMIL:
        case SCRIPT_TAMIL_2:
        case SCRIPT_TELUGU:
        case SCRIPT_TELUGU_2:
        case SCRIPT_KHMER:
            return true;
        default:
            return false;
        }
    }
/**
* Determine the script tag associated with an internal script code.
* @param code the script code
* @return a script tag
*/
public static String scriptTagFromCode(int code) {
Map<Integer, String> m = getScriptTagsMap();
if (m != null) {
String tag;
if ((tag = m.get(code)) != null) {
return tag;
} else {
return "";
}
} else {
return "";
}
}
/**
* Determine the internal script code associated with a script tag.
* @param tag the script tag
* @return a script code
*/
public static int scriptCodeFromTag(String tag) {
Map<String, Integer> m = getScriptCodeMap();
if (m != null) {
Integer c;
if ((c = m.get(tag)) != null) {
return (int) c;
} else {
return SCRIPT_UNDETERMINED;
}
} else {
return SCRIPT_UNDETERMINED;
}
}
    // lazily-initialized bidirectional mappings between internal script codes
    // and iso 15924 script tags; populated by makeScriptMaps()
    private static Map<Integer, String> scriptTagsMap;
    private static Map<String, Integer> scriptCodeMap;
private static void putScriptTag(Map tm, Map cm, int code, String tag) {
assert tag != null;
assert tag.length() != 0;
assert code >= 0;
assert code < 2000;
tm.put(code, tag);
cm.put(tag, code);
}
    // Build both script code <-> iso 15924 tag maps in one pass and publish them
    // to the static fields. Indic scripts additionally register their "*_2"
    // (v2) variants, e.g. "deva"/"dev2"; "zyyy" and "zzzz" are the
    // undetermined/uncoded sentinels.
    private static void makeScriptMaps() {
        HashMap<Integer, String> tm = new HashMap<Integer, String>();
        HashMap<String, Integer> cm = new HashMap<String, Integer>();
        putScriptTag(tm, cm, SCRIPT_HEBREW, "hebr");
        putScriptTag(tm, cm, SCRIPT_MONGOLIAN, "mong");
        putScriptTag(tm, cm, SCRIPT_ARABIC, "arab");
        putScriptTag(tm, cm, SCRIPT_GREEK, "grek");
        putScriptTag(tm, cm, SCRIPT_LATIN, "latn");
        putScriptTag(tm, cm, SCRIPT_CYRILLIC, "cyrl");
        putScriptTag(tm, cm, SCRIPT_GEORGIAN, "geor");
        putScriptTag(tm, cm, SCRIPT_BOPOMOFO, "bopo");
        putScriptTag(tm, cm, SCRIPT_HANGUL, "hang");
        putScriptTag(tm, cm, SCRIPT_GURMUKHI, "guru");
        putScriptTag(tm, cm, SCRIPT_GURMUKHI_2, "gur2");
        putScriptTag(tm, cm, SCRIPT_DEVANAGARI, "deva");
        putScriptTag(tm, cm, SCRIPT_DEVANAGARI_2, "dev2");
        putScriptTag(tm, cm, SCRIPT_GUJARATI, "gujr");
        putScriptTag(tm, cm, SCRIPT_GUJARATI_2, "gjr2");
        putScriptTag(tm, cm, SCRIPT_BENGALI, "beng");
        putScriptTag(tm, cm, SCRIPT_BENGALI_2, "bng2");
        putScriptTag(tm, cm, SCRIPT_ORIYA, "orya");
        putScriptTag(tm, cm, SCRIPT_ORIYA_2, "ory2");
        putScriptTag(tm, cm, SCRIPT_TIBETAN, "tibt");
        putScriptTag(tm, cm, SCRIPT_TELUGU, "telu");
        putScriptTag(tm, cm, SCRIPT_TELUGU_2, "tel2");
        putScriptTag(tm, cm, SCRIPT_KANNADA, "knda");
        putScriptTag(tm, cm, SCRIPT_KANNADA_2, "knd2");
        putScriptTag(tm, cm, SCRIPT_TAMIL, "taml");
        putScriptTag(tm, cm, SCRIPT_TAMIL_2, "tml2");
        putScriptTag(tm, cm, SCRIPT_MALAYALAM, "mlym");
        putScriptTag(tm, cm, SCRIPT_MALAYALAM_2, "mlm2");
        putScriptTag(tm, cm, SCRIPT_SINHALESE, "sinh");
        putScriptTag(tm, cm, SCRIPT_BURMESE, "mymr");
        putScriptTag(tm, cm, SCRIPT_THAI, "thai");
        putScriptTag(tm, cm, SCRIPT_KHMER, "khmr");
        putScriptTag(tm, cm, SCRIPT_LAO, "laoo");
        putScriptTag(tm, cm, SCRIPT_HIRAGANA, "hira");
        putScriptTag(tm, cm, SCRIPT_ETHIOPIC, "ethi");
        putScriptTag(tm, cm, SCRIPT_HAN, "hani");
        putScriptTag(tm, cm, SCRIPT_KATAKANA, "kana");
        putScriptTag(tm, cm, SCRIPT_MATH, "zmth");
        putScriptTag(tm, cm, SCRIPT_SYMBOL, "zsym");
        putScriptTag(tm, cm, SCRIPT_UNDETERMINED, "zyyy");
        putScriptTag(tm, cm, SCRIPT_UNCODED, "zzzz");
        scriptTagsMap = tm;
        scriptCodeMap = cm;
    }
    // Lazily builds both maps on first use.
    // NOTE(review): unsynchronized lazy init — concurrent first calls may build
    // the maps more than once; the result is identical each time, but confirm
    // this class is only used single-threaded during initialization.
    private static Map<Integer, String> getScriptTagsMap() {
        if (scriptTagsMap == null) {
            makeScriptMaps();
        }
        return scriptTagsMap;
    }
    // Lazily builds both maps on first use (see note on getScriptTagsMap
    // regarding unsynchronized initialization).
    private static Map<String, Integer> getScriptCodeMap() {
        if (scriptCodeMap == null) {
            makeScriptMaps();
        }
        return scriptCodeMap;
    }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1/assets.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1;
/**
*
*
* <pre>
* Resource representation as defined by the corresponding service providing the
* resource for a given API version.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.VersionedResource}
*/
public final class VersionedResource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.asset.v1.VersionedResource)
VersionedResourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use VersionedResource.newBuilder() to construct.
private VersionedResource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private VersionedResource() {
version_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new VersionedResource();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_VersionedResource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_VersionedResource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.VersionedResource.class,
com.google.cloud.asset.v1.VersionedResource.Builder.class);
}
private int bitField0_;
public static final int VERSION_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object version_ = "";
/**
*
*
* <pre>
* API version of the resource.
*
* Example:
* If the resource is an instance provided by Compute Engine v1 API as defined
* in `https://cloud.google.com/compute/docs/reference/rest/v1/instances`,
* version will be "v1".
* </pre>
*
* <code>string version = 1;</code>
*
* @return The version.
*/
@java.lang.Override
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
}
}
/**
*
*
* <pre>
* API version of the resource.
*
* Example:
* If the resource is an instance provided by Compute Engine v1 API as defined
* in `https://cloud.google.com/compute/docs/reference/rest/v1/instances`,
* version will be "v1".
* </pre>
*
* <code>string version = 1;</code>
*
* @return The bytes for version.
*/
@java.lang.Override
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 2;
private com.google.protobuf.Struct resource_;
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*
* @return Whether the resource field is set.
*/
@java.lang.Override
public boolean hasResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*
* @return The resource.
*/
@java.lang.Override
public com.google.protobuf.Struct getResource() {
return resource_ == null ? com.google.protobuf.Struct.getDefaultInstance() : resource_;
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.StructOrBuilder getResourceOrBuilder() {
return resource_ == null ? com.google.protobuf.Struct.getDefaultInstance() : resource_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, version_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getResource());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, version_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getResource());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.asset.v1.VersionedResource)) {
return super.equals(obj);
}
com.google.cloud.asset.v1.VersionedResource other =
(com.google.cloud.asset.v1.VersionedResource) obj;
if (!getVersion().equals(other.getVersion())) return false;
if (hasResource() != other.hasResource()) return false;
if (hasResource()) {
if (!getResource().equals(other.getResource())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // NOTE(review): generated file (see "DO NOT EDIT" header) — hand edits will
    // be lost on regeneration.
    // Standard protobuf-generated hash: seeded from the descriptor, then mixes
    // in each set field; memoized because the message is immutable.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getVersion().hashCode();
    if (hasResource()) {
      hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getResource().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.asset.v1.VersionedResource parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.VersionedResource parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.VersionedResource parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.VersionedResource parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.asset.v1.VersionedResource prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Resource representation as defined by the corresponding service providing the
* resource for a given API version.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.VersionedResource}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.asset.v1.VersionedResource)
com.google.cloud.asset.v1.VersionedResourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_VersionedResource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_VersionedResource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.VersionedResource.class,
com.google.cloud.asset.v1.VersionedResource.Builder.class);
}
// Construct using com.google.cloud.asset.v1.VersionedResource.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getResourceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
version_ = "";
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_VersionedResource_descriptor;
}
@java.lang.Override
public com.google.cloud.asset.v1.VersionedResource getDefaultInstanceForType() {
return com.google.cloud.asset.v1.VersionedResource.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.asset.v1.VersionedResource build() {
com.google.cloud.asset.v1.VersionedResource result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.asset.v1.VersionedResource buildPartial() {
com.google.cloud.asset.v1.VersionedResource result =
new com.google.cloud.asset.v1.VersionedResource(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // NOTE(review): generated code — edits will be lost on regeneration.
    // Copies builder state into the message: builder bit 0x1 = version set,
    // builder bit 0x2 = resource set (stored in the message as bit 0x1).
    private void buildPartial0(com.google.cloud.asset.v1.VersionedResource result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.version_ = version_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.resource_ = resourceBuilder_ == null ? resource_ : resourceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.asset.v1.VersionedResource) {
return mergeFrom((com.google.cloud.asset.v1.VersionedResource) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // NOTE(review): generated code — edits will be lost on regeneration.
    // Standard protobuf merge semantics: a non-empty scalar field in `other`
    // overwrites this builder's value; the message field is recursively merged.
    public Builder mergeFrom(com.google.cloud.asset.v1.VersionedResource other) {
      if (other == com.google.cloud.asset.v1.VersionedResource.getDefaultInstance()) return this;
      if (!other.getVersion().isEmpty()) {
        version_ = other.version_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasResource()) {
        mergeResource(other.getResource());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
version_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getResourceFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object version_ = "";
/**
 * Returns the API version of the resource (e.g. {@code "v1"} for a Compute
 * Engine v1 instance as defined in
 * {@code https://cloud.google.com/compute/docs/reference/rest/v1/instances}).
 *
 * <code>string version = 1;</code>
 *
 * @return The version.
 */
public java.lang.String getVersion() {
  java.lang.Object ref = version_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Field was last set from the wire as a ByteString: decode once and cache
  // the String so subsequent reads are free.
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  version_ = decoded;
  return decoded;
}
/**
 * Returns the API version of the resource as UTF-8 bytes.
 *
 * <code>string version = 1;</code>
 *
 * @return The bytes for version.
 */
public com.google.protobuf.ByteString getVersionBytes() {
  java.lang.Object ref = version_;
  if (ref instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Field currently holds a String: encode once and cache the ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  version_ = encoded;
  return encoded;
}
/**
 * Sets the API version of the resource (e.g. {@code "v1"}).
 *
 * <code>string version = 1;</code>
 *
 * @param value The version to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setVersion(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  version_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* API version of the resource.
*
* Example:
* If the resource is an instance provided by Compute Engine v1 API as defined
* in `https://cloud.google.com/compute/docs/reference/rest/v1/instances`,
* version will be "v1".
* </pre>
*
* <code>string version = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearVersion() {
// Restore the proto3 default ("") taken from the default instance,
// and drop the presence bit.
version_ = getDefaultInstance().getVersion();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* API version of the resource.
*
* Example:
* If the resource is an instance provided by Compute Engine v1 API as defined
* in `https://cloud.google.com/compute/docs/reference/rest/v1/instances`,
* version will be "v1".
* </pre>
*
* <code>string version = 1;</code>
*
* @param value The bytes for version to set.
* @return This builder for chaining.
*/
public Builder setVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Validate eagerly: proto3 string fields must contain valid UTF-8.
checkByteStringIsUtf8(value);
version_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.Struct resource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
resourceBuilder_;
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*
* @return Whether the resource field is set.
*/
public boolean hasResource() {
// Presence bit 0x2 of bitField0_ tracks whether resource was explicitly set.
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * Returns the JSON representation of the resource (as provided by the owning
 * service), or the default {@code Struct} when unset.
 *
 * <code>.google.protobuf.Struct resource = 2;</code>
 *
 * @return The resource.
 */
public com.google.protobuf.Struct getResource() {
  if (resourceBuilder_ != null) {
    // A nested builder is active; it owns the current value.
    return resourceBuilder_.getMessage();
  }
  return resource_ == null ? com.google.protobuf.Struct.getDefaultInstance() : resource_;
}
/**
 * Sets the JSON representation of the resource.
 *
 * <code>.google.protobuf.Struct resource = 2;</code>
 *
 * @param value The resource to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setResource(com.google.protobuf.Struct value) {
  if (resourceBuilder_ != null) {
    resourceBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    resource_ = value;
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*/
public Builder setResource(com.google.protobuf.Struct.Builder builderForValue) {
// Snapshot the builder immediately; later mutations of builderForValue
// do not affect this field.
if (resourceBuilder_ == null) {
resource_ = builderForValue.build();
} else {
resourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*/
public Builder mergeResource(com.google.protobuf.Struct value) {
if (resourceBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& resource_ != null
&& resource_ != com.google.protobuf.Struct.getDefaultInstance()) {
getResourceBuilder().mergeFrom(value);
} else {
resource_ = value;
}
} else {
resourceBuilder_.mergeFrom(value);
}
if (resource_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*/
public Builder clearResource() {
// Drop the presence bit, the cached message, and any active nested builder.
bitField0_ = (bitField0_ & ~0x00000002);
resource_ = null;
if (resourceBuilder_ != null) {
resourceBuilder_.dispose();
resourceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*/
public com.google.protobuf.Struct.Builder getResourceBuilder() {
// Handing out the builder implies the field will be set: record presence now.
bitField0_ |= 0x00000002;
onChanged();
return getResourceFieldBuilder().getBuilder();
}
/**
 * Returns a read view of the resource field: the live nested builder when one
 * is active, otherwise the stored message (or the default instance if unset).
 *
 * <code>.google.protobuf.Struct resource = 2;</code>
 */
public com.google.protobuf.StructOrBuilder getResourceOrBuilder() {
  if (resourceBuilder_ == null) {
    return resource_ == null ? com.google.protobuf.Struct.getDefaultInstance() : resource_;
  }
  return resourceBuilder_.getMessageOrBuilder();
}
/**
*
*
* <pre>
* JSON representation of the resource as defined by the corresponding
* service providing this resource.
*
* Example:
* If the resource is an instance provided by Compute Engine, this field will
* contain the JSON representation of the instance as defined by Compute
* Engine:
* `https://cloud.google.com/compute/docs/reference/rest/v1/instances`.
*
* You can find the resource definition for each supported resource type in
* this table:
* `https://cloud.google.com/asset-inventory/docs/supported-asset-types`
* </pre>
*
* <code>.google.protobuf.Struct resource = 2;</code>
*/
// Lazily creates the nested builder for the resource field, seeded with the
// current message value. Invariant: once the builder exists, it owns the
// value and the plain field is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
getResourceFieldBuilder() {
if (resourceBuilder_ == null) {
resourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>(
getResource(), getParentForChildren(), isClean());
// The builder now owns the value; clear the plain field.
resource_ = null;
}
return resourceBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.asset.v1.VersionedResource)
}
// @@protoc_insertion_point(class_scope:google.cloud.asset.v1.VersionedResource)
// Single shared default instance; merge code compares against it as the
// "unset" sentinel (see mergeFrom above).
private static final com.google.cloud.asset.v1.VersionedResource DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.asset.v1.VersionedResource();
}
public static com.google.cloud.asset.v1.VersionedResource getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stream parser: delegates to the builder's mergeFrom; on failure, attaches
// the partially parsed message so callers can inspect what was read.
private static final com.google.protobuf.Parser<VersionedResource> PARSER =
new com.google.protobuf.AbstractParser<VersionedResource>() {
@java.lang.Override
public VersionedResource parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<VersionedResource> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<VersionedResource> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.asset.v1.VersionedResource getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,173 | java-alloydb/proto-google-cloud-alloydb-v1alpha/src/main/java/com/google/cloud/alloydb/v1alpha/CreateDatabaseRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1alpha/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1alpha;
/**
*
*
* <pre>
* Message for CreateDatabase request.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.CreateDatabaseRequest}
*/
public final class CreateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1alpha.CreateDatabaseRequest)
CreateDatabaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateDatabaseRequest.newBuilder() to construct.
private CreateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateDatabaseRequest() {
parent_ = "";
databaseId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateDatabaseRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_CreateDatabaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_CreateDatabaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.class,
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 * Required. Value for parent.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Lazily decode the wire-format ByteString and cache the String result.
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  parent_ = decoded;
  return decoded;
}
/**
 * Required. Value for parent, as UTF-8 bytes.
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Field currently holds a String: encode once and cache the ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}
public static final int DATABASE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object databaseId_ = "";
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The databaseId.
*/
@java.lang.Override
public java.lang.String getDatabaseId() {
java.lang.Object ref = databaseId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
databaseId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for databaseId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDatabaseIdBytes() {
java.lang.Object ref = databaseId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
databaseId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DATABASE_FIELD_NUMBER = 3;
private com.google.cloud.alloydb.v1alpha.Database database_;
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the database field is set.
*/
@java.lang.Override
public boolean hasDatabase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The database.
*/
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.Database getDatabase() {
return database_ == null
? com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance()
: database_;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder getDatabaseOrBuilder() {
return database_ == null
? com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance()
: database_;
}
// Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields to verify; cache and report success.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3: string fields are serialized only when non-empty.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, databaseId_);
}
// The database sub-message is written only when explicitly present.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getDatabase());
}
// Round-trip any fields this binary did not recognize when parsing.
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not yet computed"; otherwise return the cache.
int size = memoizedSize;
if (size != -1) return size;
// Mirrors writeTo: only fields that would be written contribute to the size.
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, databaseId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDatabase());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest)) {
return super.equals(obj);
}
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest other =
(com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest) obj;
// Field-by-field comparison; the optional sub-message compares presence
// before value.
if (!getParent().equals(other.getParent())) return false;
if (!getDatabaseId().equals(other.getDatabaseId())) return false;
if (hasDatabase() != other.hasDatabase()) return false;
if (hasDatabase()) {
if (!getDatabase().equals(other.getDatabase())) return false;
}
// Unknown fields participate in equality as well.
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// 0 doubles as the "not yet computed" sentinel for the memoized hash.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + DATABASE_ID_FIELD_NUMBER;
hash = (53 * hash) + getDatabaseId().hashCode();
// The optional sub-message contributes only when present, matching equals().
if (hasDatabase()) {
hash = (37 * hash) + DATABASE_FIELD_NUMBER;
hash = (53 * hash) + getDatabase().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for CreateDatabase request.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.CreateDatabaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1alpha.CreateDatabaseRequest)
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_CreateDatabaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_CreateDatabaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.class,
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDatabaseFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset presence bits and all fields to their proto3 defaults.
bitField0_ = 0;
parent_ = "";
databaseId_ = "";
database_ = null;
// Release any nested builder so future access re-seeds from database_.
if (databaseBuilder_ != null) {
databaseBuilder_.dispose();
databaseBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_CreateDatabaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest build() {
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest buildPartial() {
com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest result =
new com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest(this);
// Copy only the fields that were actually set in this builder.
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields from the builder into `result`, remapping presence bits:
// builder bit 0x4 (database) becomes message bit 0x1.
private void buildPartial0(com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.databaseId_ = databaseId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
// Prefer the nested builder's built value when one is active.
result.database_ = databaseBuilder_ == null ? database_ : databaseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest) {
return mergeFrom((com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges all set fields of `other` into this builder (proto3 semantics).
public Builder mergeFrom(com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest other) {
// Merging the default instance is a no-op.
if (other == com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest.getDefaultInstance())
return this;
// Proto3 string merge: only non-empty incoming values overwrite ours.
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getDatabaseId().isEmpty()) {
databaseId_ = other.databaseId_;
bitField0_ |= 0x00000002;
onChanged();
}
// Sub-message merge is recursive (field-by-field), not replacement.
if (other.hasDatabase()) {
mergeDatabase(other.getDatabase());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses wire-format data from `input` directly into this builder.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks the end of the stream.
done = true;
break;
case 10:
// Tag 10 = field 1 (parent), wire type 2: UTF-8-validated string.
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
// Tag 18 = field 2 (database_id), wire type 2: UTF-8-validated string.
{
databaseId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
// Tag 26 = field 3 (database), wire type 2: parse into the nested builder.
{
input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
// Unknown field: preserved via parseUnknownField (round-trip safety).
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even when parsing fails partway through.
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object databaseId_ = "";
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The databaseId.
*/
public java.lang.String getDatabaseId() {
java.lang.Object ref = databaseId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
databaseId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for databaseId.
*/
public com.google.protobuf.ByteString getDatabaseIdBytes() {
java.lang.Object ref = databaseId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
databaseId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The databaseId to set.
* @return This builder for chaining.
*/
public Builder setDatabaseId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
databaseId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearDatabaseId() {
databaseId_ = getDefaultInstance().getDatabaseId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. ID of the requesting object.
* </pre>
*
* <code>string database_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for databaseId to set.
* @return This builder for chaining.
*/
public Builder setDatabaseIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
databaseId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.alloydb.v1alpha.Database database_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.alloydb.v1alpha.Database,
com.google.cloud.alloydb.v1alpha.Database.Builder,
com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>
databaseBuilder_;
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the database field is set.
*/
public boolean hasDatabase() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The database.
*/
public com.google.cloud.alloydb.v1alpha.Database getDatabase() {
if (databaseBuilder_ == null) {
return database_ == null
? com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance()
: database_;
} else {
return databaseBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatabase(com.google.cloud.alloydb.v1alpha.Database value) {
if (databaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
database_ = value;
} else {
databaseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatabase(com.google.cloud.alloydb.v1alpha.Database.Builder builderForValue) {
if (databaseBuilder_ == null) {
database_ = builderForValue.build();
} else {
databaseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeDatabase(com.google.cloud.alloydb.v1alpha.Database value) {
if (databaseBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& database_ != null
&& database_ != com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance()) {
getDatabaseBuilder().mergeFrom(value);
} else {
database_ = value;
}
} else {
databaseBuilder_.mergeFrom(value);
}
if (database_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDatabase() {
bitField0_ = (bitField0_ & ~0x00000004);
database_ = null;
if (databaseBuilder_ != null) {
databaseBuilder_.dispose();
databaseBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.alloydb.v1alpha.Database.Builder getDatabaseBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getDatabaseFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder getDatabaseOrBuilder() {
if (databaseBuilder_ != null) {
return databaseBuilder_.getMessageOrBuilder();
} else {
return database_ == null
? com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance()
: database_;
}
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1alpha.Database database = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.alloydb.v1alpha.Database,
com.google.cloud.alloydb.v1alpha.Database.Builder,
com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>
getDatabaseFieldBuilder() {
if (databaseBuilder_ == null) {
databaseBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.alloydb.v1alpha.Database,
com.google.cloud.alloydb.v1alpha.Database.Builder,
com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>(
getDatabase(), getParentForChildren(), isClean());
database_ = null;
}
return databaseBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1alpha.CreateDatabaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1alpha.CreateDatabaseRequest)
private static final com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest();
}
public static com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateDatabaseRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateDatabaseRequest>() {
@java.lang.Override
public CreateDatabaseRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateDatabaseRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateDatabaseRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.CreateDatabaseRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,239 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/UpdateVersionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/version.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [Versions.UpdateVersion][google.cloud.dialogflow.v2.Versions.UpdateVersion].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.UpdateVersionRequest}
*/
public final class UpdateVersionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.UpdateVersionRequest)
UpdateVersionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateVersionRequest.newBuilder() to construct.
private UpdateVersionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateVersionRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateVersionRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.VersionProto
.internal_static_google_cloud_dialogflow_v2_UpdateVersionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.VersionProto
.internal_static_google_cloud_dialogflow_v2_UpdateVersionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.UpdateVersionRequest.class,
com.google.cloud.dialogflow.v2.UpdateVersionRequest.Builder.class);
}
private int bitField0_;
public static final int VERSION_FIELD_NUMBER = 1;
private com.google.cloud.dialogflow.v2.Version version_;
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The version.
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.Version getVersion() {
return version_ == null
? com.google.cloud.dialogflow.v2.Version.getDefaultInstance()
: version_;
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.VersionOrBuilder getVersionOrBuilder() {
return version_ == null
? com.google.cloud.dialogflow.v2.Version.getDefaultInstance()
: version_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getVersion());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVersion());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2.UpdateVersionRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2.UpdateVersionRequest other =
(com.google.cloud.dialogflow.v2.UpdateVersionRequest) obj;
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (!getVersion().equals(other.getVersion())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.UpdateVersionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dialogflow.v2.UpdateVersionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Versions.UpdateVersion][google.cloud.dialogflow.v2.Versions.UpdateVersion].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.UpdateVersionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.UpdateVersionRequest)
com.google.cloud.dialogflow.v2.UpdateVersionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.VersionProto
.internal_static_google_cloud_dialogflow_v2_UpdateVersionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.VersionProto
.internal_static_google_cloud_dialogflow_v2_UpdateVersionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.UpdateVersionRequest.class,
com.google.cloud.dialogflow.v2.UpdateVersionRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2.UpdateVersionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getVersionFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
version_ = null;
if (versionBuilder_ != null) {
versionBuilder_.dispose();
versionBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.VersionProto
.internal_static_google_cloud_dialogflow_v2_UpdateVersionRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.UpdateVersionRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.UpdateVersionRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.UpdateVersionRequest build() {
com.google.cloud.dialogflow.v2.UpdateVersionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.UpdateVersionRequest buildPartial() {
com.google.cloud.dialogflow.v2.UpdateVersionRequest result =
new com.google.cloud.dialogflow.v2.UpdateVersionRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.v2.UpdateVersionRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.version_ = versionBuilder_ == null ? version_ : versionBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2.UpdateVersionRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2.UpdateVersionRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2.UpdateVersionRequest other) {
if (other == com.google.cloud.dialogflow.v2.UpdateVersionRequest.getDefaultInstance())
return this;
if (other.hasVersion()) {
mergeVersion(other.getVersion());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getVersionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.dialogflow.v2.Version version_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.v2.Version,
com.google.cloud.dialogflow.v2.Version.Builder,
com.google.cloud.dialogflow.v2.VersionOrBuilder>
versionBuilder_;
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the version field is set.
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The version.
*/
public com.google.cloud.dialogflow.v2.Version getVersion() {
if (versionBuilder_ == null) {
return version_ == null
? com.google.cloud.dialogflow.v2.Version.getDefaultInstance()
: version_;
} else {
return versionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setVersion(com.google.cloud.dialogflow.v2.Version value) {
if (versionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
version_ = value;
} else {
versionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setVersion(com.google.cloud.dialogflow.v2.Version.Builder builderForValue) {
if (versionBuilder_ == null) {
version_ = builderForValue.build();
} else {
versionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeVersion(com.google.cloud.dialogflow.v2.Version value) {
if (versionBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& version_ != null
&& version_ != com.google.cloud.dialogflow.v2.Version.getDefaultInstance()) {
getVersionBuilder().mergeFrom(value);
} else {
version_ = value;
}
} else {
versionBuilder_.mergeFrom(value);
}
if (version_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = null;
if (versionBuilder_ != null) {
versionBuilder_.dispose();
versionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Returns a mutable nested builder; eagerly marks the field as set because
    // the caller may mutate the message through the returned builder.
    public com.google.cloud.dialogflow.v2.Version.Builder getVersionBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getVersionFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Read-only view of the field; never returns null (falls back to the default instance).
    public com.google.cloud.dialogflow.v2.VersionOrBuilder getVersionOrBuilder() {
      if (versionBuilder_ != null) {
        return versionBuilder_.getMessageOrBuilder();
      } else {
        return version_ == null
            ? com.google.cloud.dialogflow.v2.Version.getDefaultInstance()
            : version_;
      }
    }
/**
*
*
* <pre>
* Required. The version to update.
* Supported formats:
*
* - `projects/<Project ID>/agent/versions/<Version ID>`
* - `projects/<Project ID>/locations/<Location ID>/agent/versions/<Version
* ID>`
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.Version version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for version; once created, the
    // builder owns the message and the plain version_ field is cleared.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2.Version,
            com.google.cloud.dialogflow.v2.Version.Builder,
            com.google.cloud.dialogflow.v2.VersionOrBuilder>
        getVersionFieldBuilder() {
      if (versionBuilder_ == null) {
        versionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.v2.Version,
                com.google.cloud.dialogflow.v2.Version.Builder,
                com.google.cloud.dialogflow.v2.VersionOrBuilder>(
                getVersion(), getParentForChildren(), isClean());
        version_ = null;
      }
      return versionBuilder_;
    }
    // Backing storage for update_mask: either the plain message below or, once
    // a nested builder has been requested, the SingleFieldBuilderV3 (exactly
    // one of the two is in use at any time).
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
    // Presence of update_mask is tracked via bit 0x00000002 of bitField0_.
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
    // Never returns null: falls back to FieldMask's default instance when unset.
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // NOTE(review): protoc-generated builder method -- regenerate from the .proto instead of hand-editing.
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          // Singular message fields reject null; use clearUpdateMask() to unset.
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;  // presence bit for update_mask
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // NOTE(review): protoc-generated builder method -- regenerate from the .proto instead of hand-editing.
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        // No nested builder in use: store the freshly built message directly.
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;  // presence bit for update_mask
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // NOTE(review): protoc-generated builder method -- regenerate from the .proto instead of hand-editing.
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          // A non-default mask is already set: field-merge the new value into it.
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          // Unset or still the default instance: adopt the incoming mask wholesale.
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        // Only mark the field present (and notify listeners) when a message is actually stored.
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // NOTE(review): protoc-generated builder method -- regenerate from the .proto instead of hand-editing.
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);  // drop the presence bit for update_mask
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Returns a mutable nested builder; eagerly marks the field as set because
    // the caller may mutate the mask through the returned builder.
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Read-only view of the field; never returns null (falls back to the default instance).
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* Required. The mask to control which fields get updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for update_mask; once created,
    // the builder owns the message and the plain updateMask_ field is cleared.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Unknown-field handling is delegated, unchanged, to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.UpdateVersionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.UpdateVersionRequest)
  // Shared immutable default instance, created once at class-load time.
  private static final com.google.cloud.dialogflow.v2.UpdateVersionRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.UpdateVersionRequest();
  }

  public static com.google.cloud.dialogflow.v2.UpdateVersionRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all instances. All failure modes are normalized
  // to InvalidProtocolBufferException with the partially parsed message attached.
  private static final com.google.protobuf.Parser<UpdateVersionRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateVersionRequest>() {
        @java.lang.Override
        public UpdateVersionRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failures are wrapped so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors both expose the same shared PARSER /
  // DEFAULT_INSTANCE singletons.
  public static com.google.protobuf.Parser<UpdateVersionRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateVersionRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2.UpdateVersionRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.text;
import java.io.InvalidObjectException;
import java.util.Calendar;
import java.util.Date;
import java.util.Hashtable;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.harmony.text.internal.nls.Messages;
/**
* An abstract class for date/time formatting subclasses which formats and
* parses dates or time in a language-independent manner. The date/time
* formatting subclass, such as {@link SimpleDateFormat}, allows for formatting
* (i.e., date -> text), parsing (text -> date), and normalization. The date is
* represented as a {@code Date} object or as the milliseconds since January 1,
* 1970, 00:00:00 GMT.
* <p>
* DateFormat provides many class methods for obtaining default date/time
* formatters based on the default or a given locale and a number of formatting
* styles. The formatting styles include FULL, LONG, MEDIUM, and SHORT. More
* details and examples for using these styles are provided in the method
* descriptions.
* <p>
* {@code DateFormat} helps you to format and parse dates for any locale. Your
* code can be completely independent of the locale conventions for months, days
* of the week, or even the calendar format: lunar vs. solar.
* <p>
* To format a date for the current Locale, use one of the static factory
* methods:
* <blockquote>
*
* <pre>
* myString = DateFormat.getDateInstance().format(myDate);
* </pre>
*
* </blockquote>
* <p>
* If you are formatting multiple dates, it is more efficient to get the format
* and use it multiple times so that the system doesn't have to fetch the
* information about the local language and country conventions multiple times.
* <blockquote>
*
* <pre>
* DateFormat df = DateFormat.getDateInstance();
* for (int i = 0; i < a.length; ++i) {
* output.println(df.format(myDate[i]) + "; ");
* }
* </pre>
*
* </blockquote>
* <p>
* To format a number for a different locale, specify it in the call to
* {@code getDateInstance}:
* <blockquote>
*
* <pre>
* DateFormat df = DateFormat.getDateInstance(DateFormat.LONG, Locale.FRANCE);
* </pre>
*
* </blockquote>
* <p>
* {@code DateFormat} can also be used to parse strings:
* <blockquote>
*
* <pre>
* myDate = df.parse(myString);
* </pre>
*
* </blockquote>
* <p>
* Use {@code getDateInstance} to get the normal date format for a country.
* Other static factory methods are available: Use {@code getTimeInstance} to
* get the time format for a country. Use {@code getDateTimeInstance} to get the
* date and time format. You can pass in different options to these factory
* methods to control the length of the result; from SHORT to MEDIUM to LONG to
* FULL. The exact result depends on the locale, but generally:
* <ul>
* <li>SHORT is completely numeric, such as 12.13.52 or 3:30pm
* <li>MEDIUM is longer, such as Jan 12, 1952
* <li>LONG is longer, such as January 12, 1952 or 3:30:32pm
* <li>FULL is pretty completely specified, such as Tuesday, April 12, 1952 AD
* or 3:30:42pm PST.
* </ul>
* <p>
* If needed, the time zone can be set on the format. For even greater control
* over the formatting or parsing, try casting the {@code DateFormat} you get
* from the factory methods to a {@code SimpleDateFormat}. This will work for
* the majority of countries; just remember to put it in a try block in case you
* encounter an unusual one.
* <p>
* There are versions of the parse and format methods which use
* {@code ParsePosition} and {@code FieldPosition} to allow you to
* <ul>
* <li>progressively parse through pieces of a string;
* <li>align any particular field.
* </ul>
* <h4>Synchronization</h4>
* <p>
* Date formats are not synchronized. It is recommended to create separate
* format instances for each thread. If multiple threads access a format
* concurrently, it must be synchronized externally.
*
* @see NumberFormat
* @see SimpleDateFormat
* @see Calendar
* @see TimeZone
*/
public abstract class DateFormat extends Format {
    // Serial version from Harmony's serialization contract with the RI.
    private static final long serialVersionUID = 7218322306649953788L;

    /**
     * The calendar that this {@code DateFormat} uses to format a number
     * representing a date.
     */
    protected Calendar calendar;

    /**
     * The number format used to format a number.
     */
    protected NumberFormat numberFormat;

    /**
     * The format style constant defining the default format style. The default
     * is MEDIUM.
     */
    public final static int DEFAULT = 2; // same value as MEDIUM

    /**
     * The format style constant defining the full style.
     */
    public final static int FULL = 0;

    /**
     * The format style constant defining the long style.
     */
    public final static int LONG = 1;

    /**
     * The format style constant defining the medium style.
     */
    public final static int MEDIUM = 2;

    /**
     * The format style constant defining the short style.
     */
    public final static int SHORT = 3;

    /**
     * The {@code FieldPosition} selector for 'G' field alignment, corresponds
     * to the {@link Calendar#ERA} field.
     */
    public final static int ERA_FIELD = 0;

    /**
     * The {@code FieldPosition} selector for 'y' field alignment, corresponds
     * to the {@link Calendar#YEAR} field.
     */
    public final static int YEAR_FIELD = 1;

    /**
     * The {@code FieldPosition} selector for 'M' field alignment, corresponds
     * to the {@link Calendar#MONTH} field.
     */
    public final static int MONTH_FIELD = 2;

    /**
     * The {@code FieldPosition} selector for 'd' field alignment, corresponds
     * to the {@link Calendar#DATE} field.
     */
    public final static int DATE_FIELD = 3;

    /**
     * The {@code FieldPosition} selector for 'k' field alignment, corresponds
     * to the {@link Calendar#HOUR_OF_DAY} field. {@code HOUR_OF_DAY1_FIELD} is
     * used for the one-based 24-hour clock. For example, 23:59 + 01:00 results
     * in 24:59.
     */
    public final static int HOUR_OF_DAY1_FIELD = 4;

    /**
     * The {@code FieldPosition} selector for 'H' field alignment, corresponds
     * to the {@link Calendar#HOUR_OF_DAY} field. {@code HOUR_OF_DAY0_FIELD} is
     * used for the zero-based 24-hour clock. For example, 23:59 + 01:00 results
     * in 00:59.
     */
    public final static int HOUR_OF_DAY0_FIELD = 5;

    /**
     * FieldPosition selector for 'm' field alignment, corresponds to the
     * {@link Calendar#MINUTE} field.
     */
    public final static int MINUTE_FIELD = 6;

    /**
     * FieldPosition selector for 's' field alignment, corresponds to the
     * {@link Calendar#SECOND} field.
     */
    public final static int SECOND_FIELD = 7;

    /**
     * FieldPosition selector for 'S' field alignment, corresponds to the
     * {@link Calendar#MILLISECOND} field.
     */
    public final static int MILLISECOND_FIELD = 8;

    /**
     * FieldPosition selector for 'E' field alignment, corresponds to the
     * {@link Calendar#DAY_OF_WEEK} field.
     */
    public final static int DAY_OF_WEEK_FIELD = 9;

    /**
     * FieldPosition selector for 'D' field alignment, corresponds to the
     * {@link Calendar#DAY_OF_YEAR} field.
     */
    public final static int DAY_OF_YEAR_FIELD = 10;

    /**
     * FieldPosition selector for 'F' field alignment, corresponds to the
     * {@link Calendar#DAY_OF_WEEK_IN_MONTH} field.
     */
    public final static int DAY_OF_WEEK_IN_MONTH_FIELD = 11;

    /**
     * FieldPosition selector for 'w' field alignment, corresponds to the
     * {@link Calendar#WEEK_OF_YEAR} field.
     */
    public final static int WEEK_OF_YEAR_FIELD = 12;

    /**
     * FieldPosition selector for 'W' field alignment, corresponds to the
     * {@link Calendar#WEEK_OF_MONTH} field.
     */
    public final static int WEEK_OF_MONTH_FIELD = 13;

    /**
     * FieldPosition selector for 'a' field alignment, corresponds to the
     * {@link Calendar#AM_PM} field.
     */
    public final static int AM_PM_FIELD = 14;

    /**
     * FieldPosition selector for 'h' field alignment, corresponding to the
     * {@link Calendar#HOUR} field. {@code HOUR1_FIELD} is used for the
     * one-based 12-hour clock. For example, 11:30 PM + 1 hour results in 12:30
     * AM.
     */
    public final static int HOUR1_FIELD = 15;

    /**
     * The {@code FieldPosition} selector for 'K' field alignment, corresponds
     * to the {@link Calendar#HOUR} field. {@code HOUR0_FIELD} is used for the
     * zero-based 12-hour clock. For example, 11:30 PM + 1 hour results in
     * 0:30 AM.
     * <p>
     * (The previous comment here duplicated TIMEZONE_FIELD's description.)
     */
    public final static int HOUR0_FIELD = 16;

    /**
     * The {@code FieldPosition} selector for 'z' field alignment, corresponds
     * to the {@link Calendar#ZONE_OFFSET} and {@link Calendar#DST_OFFSET}
     * fields.
     */
    public final static int TIMEZONE_FIELD = 17;
/**
* Constructs a new instance of {@code DateFormat}.
*/
    protected DateFormat() {
        // Intentionally empty: concrete subclasses are expected to initialize
        // the protected calendar and numberFormat fields.
    }
/**
* Returns a new instance of {@code DateFormat} with the same properties.
*
* @return a shallow copy of this {@code DateFormat}.
*
* @see java.lang.Cloneable
*/
@Override
public Object clone() {
DateFormat clone = (DateFormat) super.clone();
clone.calendar = (Calendar) calendar.clone();
clone.numberFormat = (NumberFormat) numberFormat.clone();
return clone;
}
/**
* Compares this date format with the specified object and indicates if they
* are equal.
*
* @param object
* the object to compare with this date format.
* @return {@code true} if {@code object} is a {@code DateFormat} object and
* it has the same properties as this date format; {@code false}
* otherwise.
* @see #hashCode
*/
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (!(object instanceof DateFormat)) {
return false;
}
DateFormat dateFormat = (DateFormat) object;
return numberFormat.equals(dateFormat.numberFormat)
&& calendar.getTimeZone().equals(
dateFormat.calendar.getTimeZone())
&& calendar.getFirstDayOfWeek() == dateFormat.calendar
.getFirstDayOfWeek()
&& calendar.getMinimalDaysInFirstWeek() == dateFormat.calendar
.getMinimalDaysInFirstWeek()
&& calendar.isLenient() == dateFormat.calendar.isLenient();
}
/**
* Formats the specified object as a string using the pattern of this date
* format and appends the string to the specified string buffer.
* <p>
* If the {@code field} member of {@code field} contains a value specifying
* a format field, then its {@code beginIndex} and {@code endIndex} members
* will be updated with the position of the first occurrence of this field
* in the formatted text.
*
* @param object
* the source object to format, must be a {@code Date} or a
* {@code Number}. If {@code object} is a number then a date is
* constructed using the {@code longValue()} of the number.
* @param buffer
* the target string buffer to append the formatted date/time to.
* @param field
* on input: an optional alignment field; on output: the offsets
* of the alignment field in the formatted text.
* @return the string buffer.
* @throws IllegalArgumentException
* if {@code object} is neither a {@code Date} nor a
* {@code Number} instance.
*/
@Override
public final StringBuffer format(Object object, StringBuffer buffer,
FieldPosition field) {
if (object instanceof Date) {
return format((Date) object, buffer, field);
}
if (object instanceof Number) {
return format(new Date(((Number) object).longValue()), buffer,
field);
}
throw new IllegalArgumentException();
}
/**
* Formats the specified date using the rules of this date format.
*
* @param date
* the date to format.
* @return the formatted string.
*/
public final String format(Date date) {
return format(date, new StringBuffer(), new FieldPosition(0))
.toString();
}
/**
* Formats the specified date as a string using the pattern of this date
* format and appends the string to the specified string buffer.
* <p>
* If the {@code field} member of {@code field} contains a value specifying
* a format field, then its {@code beginIndex} and {@code endIndex} members
* will be updated with the position of the first occurrence of this field
* in the formatted text.
*
* @param date
* the date to format.
* @param buffer
* the target string buffer to append the formatted date/time to.
* @param field
* on input: an optional alignment field; on output: the offsets
* of the alignment field in the formatted text.
* @return the string buffer.
*/
    // Core formatting primitive: both format(Object,...) and format(Date)
    // funnel into this subclass-provided implementation.
    public abstract StringBuffer format(Date date, StringBuffer buffer,
            FieldPosition field);
/**
* Gets the list of installed locales which support {@code DateFormat}.
*
* @return an array of locales.
*/
    public static Locale[] getAvailableLocales() {
        // Delegates directly to java.util.Locale's installed-locale list.
        return Locale.getAvailableLocales();
    }
/**
* Returns the calendar used by this {@code DateFormat}.
*
* @return the calendar used by this date format.
*/
    public Calendar getCalendar() {
        // Returns the live internal calendar, not a copy; caller mutations
        // affect this format.
        return calendar;
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing dates in
* the DEFAULT style for the default locale.
*
* @return the {@code DateFormat} instance for the default style and locale.
*/
    public final static DateFormat getDateInstance() {
        return getDateInstance(DEFAULT); // DEFAULT == MEDIUM
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing dates in
* the specified style for the default locale.
*
* @param style
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @return the {@code DateFormat} instance for {@code style} and the default
* locale.
* @throws IllegalArgumentException
* if {@code style} is not one of SHORT, MEDIUM, LONG, FULL, or
* DEFAULT.
*/
    public final static DateFormat getDateInstance(int style) {
        // Validate the style before consulting the default locale so an
        // invalid argument fails fast.
        checkDateStyle(style);
        return getDateInstance(style, Locale.getDefault());
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing dates in
* the specified style for the specified locale.
*
* @param style
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @param locale
* the locale.
* @throws IllegalArgumentException
* if {@code style} is not one of SHORT, MEDIUM, LONG, FULL, or
* DEFAULT.
* @return the {@code DateFormat} instance for {@code style} and
* {@code locale}.
*/
    public final static DateFormat getDateInstance(int style, Locale locale) {
        checkDateStyle(style);
        // Wraps the ICU formatter. NOTE(review): assumes ICU always returns a
        // com.ibm.icu.text.SimpleDateFormat here -- confirm for all locales,
        // otherwise the cast would throw ClassCastException.
        com.ibm.icu.text.DateFormat icuFormat = com.ibm.icu.text.DateFormat.getDateInstance(style, locale);
        return new SimpleDateFormat(locale, (com.ibm.icu.text.SimpleDateFormat)icuFormat);
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing dates
* and time values in the DEFAULT style for the default locale.
*
* @return the {@code DateFormat} instance for the default style and locale.
*/
    public final static DateFormat getDateTimeInstance() {
        return getDateTimeInstance(DEFAULT, DEFAULT); // DEFAULT == MEDIUM for both parts
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing of both
* dates and time values in the manner appropriate for the default locale.
*
* @param dateStyle
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @param timeStyle
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @return the {@code DateFormat} instance for {@code dateStyle},
* {@code timeStyle} and the default locale.
* @throws IllegalArgumentException
* if {@code dateStyle} or {@code timeStyle} is not one of
* SHORT, MEDIUM, LONG, FULL, or DEFAULT.
*/
    public final static DateFormat getDateTimeInstance(int dateStyle,
            int timeStyle) {
        // Both styles are validated here (the three-argument overload checks
        // again) so an invalid argument fails before the default-locale lookup.
        checkTimeStyle(timeStyle);
        checkDateStyle(dateStyle);
        return getDateTimeInstance(dateStyle, timeStyle, Locale.getDefault());
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing dates
* and time values in the specified styles for the specified locale.
*
* @param dateStyle
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @param timeStyle
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @param locale
* the locale.
* @return the {@code DateFormat} instance for {@code dateStyle},
* {@code timeStyle} and {@code locale}.
* @throws IllegalArgumentException
* if {@code dateStyle} or {@code timeStyle} is not one of
* SHORT, MEDIUM, LONG, FULL, or DEFAULT.
*/
    public final static DateFormat getDateTimeInstance(int dateStyle,
            int timeStyle, Locale locale) {
        checkTimeStyle(timeStyle);
        checkDateStyle(dateStyle);
        // Wraps the ICU formatter. NOTE(review): assumes ICU always returns a
        // com.ibm.icu.text.SimpleDateFormat here -- confirm for all locales.
        com.ibm.icu.text.DateFormat icuFormat = com.ibm.icu.text.DateFormat.getDateTimeInstance(dateStyle, timeStyle, locale);
        return new SimpleDateFormat(locale, (com.ibm.icu.text.SimpleDateFormat)icuFormat);
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing dates
* and times in the SHORT style for the default locale.
*
* @return the {@code DateFormat} instance for the SHORT style and default
* locale.
*/
    public final static DateFormat getInstance() {
        // Convenience: SHORT date plus SHORT time for the default locale.
        return getDateTimeInstance(SHORT, SHORT);
    }
/**
* Returns the {@code NumberFormat} used by this {@code DateFormat}.
*
* @return the {@code NumberFormat} used by this date format.
*/
    public NumberFormat getNumberFormat() {
        // Returns the live internal number format, not a copy.
        return numberFormat;
    }
static String getStyleName(int style) {
String styleName;
switch (style) {
case SHORT:
styleName = "SHORT"; //$NON-NLS-1$
break;
case MEDIUM:
styleName = "MEDIUM"; //$NON-NLS-1$
break;
case LONG:
styleName = "LONG"; //$NON-NLS-1$
break;
case FULL:
styleName = "FULL"; //$NON-NLS-1$
break;
default:
styleName = ""; //$NON-NLS-1$
}
return styleName;
}
/**
* Returns a {@code DateFormat} instance for formatting and parsing time
* values in the DEFAULT style for the default locale.
*
* @return the {@code DateFormat} instance for the default style and locale.
*/
    public final static DateFormat getTimeInstance() {
        return getTimeInstance(DEFAULT); // DEFAULT == MEDIUM
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing time
* values in the specified style for the default locale.
*
* @param style
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @return the {@code DateFormat} instance for {@code style} and the default
* locale.
* @throws IllegalArgumentException
* if {@code style} is not one of SHORT, MEDIUM, LONG, FULL, or
* DEFAULT.
*/
    public final static DateFormat getTimeInstance(int style) {
        // Validate the style before consulting the default locale so an
        // invalid argument fails fast.
        checkTimeStyle(style);
        return getTimeInstance(style, Locale.getDefault());
    }
/**
* Returns a {@code DateFormat} instance for formatting and parsing time
* values in the specified style for the specified locale.
*
* @param style
* one of SHORT, MEDIUM, LONG, FULL, or DEFAULT.
* @param locale
* the locale.
* @throws IllegalArgumentException
* if {@code style} is not one of SHORT, MEDIUM, LONG, FULL, or
* DEFAULT.
* @return the {@code DateFormat} instance for {@code style} and
* {@code locale}.
*/
    public final static DateFormat getTimeInstance(int style, Locale locale) {
        checkTimeStyle(style);
        // Wraps the ICU formatter. NOTE(review): assumes ICU always returns a
        // com.ibm.icu.text.SimpleDateFormat here -- confirm for all locales.
        com.ibm.icu.text.DateFormat icuFormat = com.ibm.icu.text.DateFormat.getTimeInstance(style, locale);
        return new SimpleDateFormat(locale, (com.ibm.icu.text.SimpleDateFormat)icuFormat);
    }
/**
* Returns the time zone of this date format's calendar.
*
* @return the time zone of the calendar used by this date format.
*/
    public TimeZone getTimeZone() {
        // Convenience for getCalendar().getTimeZone().
        return calendar.getTimeZone();
    }
@Override
public int hashCode() {
return calendar.getFirstDayOfWeek()
+ calendar.getMinimalDaysInFirstWeek()
+ calendar.getTimeZone().hashCode()
+ (calendar.isLenient() ? 1231 : 1237)
+ numberFormat.hashCode();
}
/**
* Indicates whether the calendar used by this date format is lenient.
*
* @return {@code true} if the calendar is lenient; {@code false} otherwise.
*/
    public boolean isLenient() {
        // Leniency is a property of the underlying calendar.
        return calendar.isLenient();
    }
/**
* Parses a date from the specified string using the rules of this date
* format.
*
* @param string
* the string to parse.
* @return the {@code Date} resulting from the parsing.
* @throws ParseException
* if an error occurs during parsing.
*/
    public Date parse(String string) throws ParseException {
        ParsePosition position = new ParsePosition(0);
        Date date = parse(string, position);
        // parse(String, ParsePosition) leaves the index untouched on failure,
        // so an index still at 0 means nothing was parsed.
        if (position.getIndex() == 0) {
            // text.19=Unparseable date: {0}
            throw new ParseException(
                    Messages.getString("text.19", string), position.getErrorIndex()); //$NON-NLS-1$
        }
        return date;
    }
/**
* Parses a date from the specified string starting at the index specified
* by {@code position}. If the string is successfully parsed then the index
* of the {@code ParsePosition} is updated to the index following the parsed
* text. On error, the index is unchanged and the error index of {@code
* ParsePosition} is set to the index where the error occurred.
* <p>
* By default, parsing is lenient: If the input is not in the form used by
* this object's format method but can still be parsed as a date, then the
* parse succeeds. Clients may insist on strict adherence to the format by
* calling {@code setLenient(false)}.
*
* @param string
* the string to parse.
* @param position
* input/output parameter, specifies the start index in {@code
* string} from where to start parsing. If parsing is successful,
* it is updated with the index following the parsed text; on
* error, the index is unchanged and the error index is set to
* the index where the error occurred.
* @return the date resulting from the parse, or {@code null} if there is an
* error.
*/
    // Core parsing primitive implemented by subclasses (e.g. SimpleDateFormat);
    // parse(String) and parseObject both delegate here.
    public abstract Date parse(String string, ParsePosition position);
/**
* Parses a date from the specified string starting at the index specified
* by {@code position}. If the string is successfully parsed then the index
* of the {@code ParsePosition} is updated to the index following the parsed
* text. On error, the index is unchanged and the error index of
* {@code ParsePosition} is set to the index where the error occurred.
* <p>
* By default, parsing is lenient: If the input is not in the form used by
* this object's format method but can still be parsed as a date, then the
* parse succeeds. Clients may insist on strict adherence to the format by
* calling {@code setLenient(false)}.
*
* @param string
* the string to parse.
* @param position
* input/output parameter, specifies the start index in
* {@code string} from where to start parsing. If parsing is
* successful, it is updated with the index following the parsed
* text; on error, the index is unchanged and the error index
* is set to the index where the error occurred.
* @return the date resulting from the parsing, or {@code null} if there is
* an error.
*/
    @Override
    public Object parseObject(String string, ParsePosition position) {
        // DateFormat only produces dates, so delegate to the Date-specific parse.
        return parse(string, position);
    }
/**
* Sets the calendar used by this date format.
*
* @param cal
* the new calendar.
*/
    public void setCalendar(Calendar cal) {
        // Stores the reference without copying: later mutations of cal by the
        // caller are visible to this format.
        calendar = cal;
    }
/**
* Specifies whether or not date/time parsing shall be lenient. With lenient
* parsing, the parser may use heuristics to interpret inputs that do not
* precisely match this object's format. With strict parsing, inputs must
* match this object's format.
*
* @param value
* {@code true} to set the calendar to be lenient, {@code false}
* otherwise.
*/
    public void setLenient(boolean value) {
        // Leniency is a property of the underlying calendar; simply forward.
        calendar.setLenient(value);
    }
/**
* Sets the {@code NumberFormat} used by this date format.
*
* @param format
* the new number format.
*/
    public void setNumberFormat(NumberFormat format) {
        // Stores the reference without copying.
        numberFormat = format;
    }
/**
* Sets the time zone of the calendar used by this date format.
*
* @param timezone
* the new time zone.
*/
    public void setTimeZone(TimeZone timezone) {
        // Forwards to the underlying calendar, which owns the time zone.
        calendar.setTimeZone(timezone);
    }
/**
* The instances of this inner class are used as attribute keys and values
* in {@code AttributedCharacterIterator} that the
* {@link SimpleDateFormat#formatToCharacterIterator(Object)} method returns.
* <p>
* There is no public constructor in this class, the only instances are the
* constants defined here.
*/
public static class Field extends Format.Field {
        private static final long serialVersionUID = 7441350119349544720L;

        // Maps Calendar field numbers to the corresponding Field constant.
        // NOTE(review): presumably consulted when resolving deserialized
        // instances -- the consumer is outside this excerpt; confirm.
        private static Hashtable<Integer, Field> table = new Hashtable<Integer, Field>();

        /**
         * Marks the era part of a date.
         */
        public final static Field ERA = new Field("era", Calendar.ERA); //$NON-NLS-1$

        /**
         * Marks the year part of a date.
         */
        public final static Field YEAR = new Field("year", Calendar.YEAR); //$NON-NLS-1$

        /**
         * Marks the month part of a date.
         */
        public final static Field MONTH = new Field("month", Calendar.MONTH); //$NON-NLS-1$

        /**
         * Marks the hour of the day part of a date (0-23).
         * (Previously documented as 0-11; Calendar.HOUR_OF_DAY is the 24-hour field.)
         */
        public final static Field HOUR_OF_DAY0 = new Field("hour of day", //$NON-NLS-1$
                Calendar.HOUR_OF_DAY);

        /**
         * Marks the hour of the day part of a date (1-24).
         * (Previously documented as 1-12; this is the one-based 24-hour field.)
         */
        public final static Field HOUR_OF_DAY1 = new Field("hour of day 1", -1); //$NON-NLS-1$
        // -1: Calendar has no dedicated one-based hour-of-day field.

        /**
         * Marks the minute part of a time.
         */
        public final static Field MINUTE = new Field("minute", Calendar.MINUTE); //$NON-NLS-1$

        /**
         * Marks the second part of a time.
         */
        public final static Field SECOND = new Field("second", Calendar.SECOND); //$NON-NLS-1$

        /**
         * Marks the millisecond part of a time.
         */
        public final static Field MILLISECOND = new Field("millisecond", //$NON-NLS-1$
                Calendar.MILLISECOND);

        /**
         * Marks the day of the week part of a date.
         */
        public final static Field DAY_OF_WEEK = new Field("day of week", //$NON-NLS-1$
                Calendar.DAY_OF_WEEK);

        /**
         * Marks the day of the month part of a date.
         */
        public final static Field DAY_OF_MONTH = new Field("day of month", //$NON-NLS-1$
                Calendar.DAY_OF_MONTH);

        /**
         * Marks the day of the year part of a date.
         */
        public final static Field DAY_OF_YEAR = new Field("day of year", //$NON-NLS-1$
                Calendar.DAY_OF_YEAR);

        /**
         * Marks the day of the week in the month part of a date.
         */
        public final static Field DAY_OF_WEEK_IN_MONTH = new Field(
                "day of week in month", Calendar.DAY_OF_WEEK_IN_MONTH); //$NON-NLS-1$

        /**
         * Marks the week of the year part of a date.
         */
        public final static Field WEEK_OF_YEAR = new Field("week of year", //$NON-NLS-1$
                Calendar.WEEK_OF_YEAR);

        /**
         * Marks the week of the month part of a date.
         */
        public final static Field WEEK_OF_MONTH = new Field("week of month", //$NON-NLS-1$
                Calendar.WEEK_OF_MONTH);
/**
* Marks the time indicator part of a date.
*/
public final static Field AM_PM = new Field("am pm", Calendar.AM_PM); //$NON-NLS-1$
/**
* Marks the hour part of a date (0-11).
*/
public final static Field HOUR0 = new Field("hour", Calendar.HOUR); //$NON-NLS-1$
/**
* Marks the hour part of a date (1-12).
*/
public final static Field HOUR1 = new Field("hour 1", -1); //$NON-NLS-1$
/**
* Marks the time zone part of a date.
*/
public final static Field TIME_ZONE = new Field("time zone", -1); //$NON-NLS-1$
/**
* The calendar field that this field represents.
*/
private int calendarField = -1;
/**
* Constructs a new instance of {@code DateFormat.Field} with the given
* fieldName and calendar field.
*
* @param fieldName
* the field name.
* @param calendarField
* the calendar field type of the field.
*/
protected Field(String fieldName, int calendarField) {
super(fieldName);
this.calendarField = calendarField;
if (calendarField != -1
&& table.get(new Integer(calendarField)) == null) {
table.put(new Integer(calendarField), this);
}
}
/**
* Returns the Calendar field that this field represents.
*
* @return the calendar field.
*/
public int getCalendarField() {
return calendarField;
}
/**
* Returns the {@code DateFormat.Field} instance for the given calendar
* field.
*
* @param calendarField
* a calendar field constant.
* @return the {@code DateFormat.Field} corresponding to
* {@code calendarField}.
* @throws IllegalArgumentException
* if {@code calendarField} is negative or greater than the
* field count of {@code Calendar}.
*/
public static Field ofCalendarField(int calendarField) {
if (calendarField < 0 || calendarField >= Calendar.FIELD_COUNT) {
throw new IllegalArgumentException();
}
return table.get(new Integer(calendarField));
}
/**
* Resolves instances that are deserialized to the constant
* {@code DateFormat.Field} values.
*
* @return the resolved field object.
* @throws InvalidObjectException
* if an error occurs while resolving the field object.
*/
@Override
protected Object readResolve() throws InvalidObjectException {
if (this.getClass() != Field.class) {
// text.0C=cannot resolve subclasses
throw new InvalidObjectException(Messages.getString("text.0C")); //$NON-NLS-1$
}
if (calendarField != -1) {
try {
Field result = ofCalendarField(calendarField);
if (result != null && this.getName().equals(result.getName())) {
return result;
}
} catch (IllegalArgumentException e) {
// text.02=Unknown attribute
throw new InvalidObjectException(Messages
.getString("text.02")); //$NON-NLS-1$
}
} else {
if (this.equals(TIME_ZONE)) {
return TIME_ZONE;
}
if (this.equals(HOUR1)) {
return HOUR1;
}
if (this.equals(HOUR_OF_DAY1)) {
return HOUR_OF_DAY1;
}
}
// text.02=Unknown attribute
throw new InvalidObjectException(Messages.getString("text.02")); //$NON-NLS-1$
}
}
private static void checkDateStyle(int style) {
if (!(style == SHORT || style == MEDIUM || style == LONG
|| style == FULL || style == DEFAULT)) {
// text.0E=Illegal date style: {0}
throw new IllegalArgumentException(Messages.getString(
"text.0E", style)); //$NON-NLS-1$
}
}
private static void checkTimeStyle(int style) {
if (!(style == SHORT || style == MEDIUM || style == LONG
|| style == FULL || style == DEFAULT)) {
// text.0F=Illegal time style: {0}
throw new IllegalArgumentException(Messages.getString(
"text.0F", style)); //$NON-NLS-1$
}
}
}
|
apache/maven-surefire | 34,529 | maven-surefire-report-plugin/src/test/java/org/apache/maven/plugins/surefire/report/SurefireReportTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.plugins.surefire.report;
import java.io.File;
import java.util.Collections;
import java.util.List;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.LegacySupport;
import org.apache.maven.plugin.MojoExecution;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.testing.AbstractMojoTestCase;
import org.apache.maven.plugin.testing.ArtifactStubFactory;
import org.apache.maven.plugin.testing.stubs.MavenProjectStub;
import org.apache.maven.plugins.surefire.report.stubs.DependencyArtifactStubFactory;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.utils.io.FileUtils;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.internal.impl.SimpleLocalRepositoryManagerFactory;
import org.eclipse.aether.repository.LocalRepository;
import static org.apache.maven.plugins.surefire.report.Utils.toSystemNewLine;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* @author <a href="mailto:aramirez@apache.org">Allan Ramirez</a>
*/
@SuppressWarnings("checkstyle:linelength")
public class SurefireReportTest extends AbstractMojoTestCase {
    // Factory that creates stub dependency artifacts under target/; its
    // working directory doubles as the local repository for the mojo.
    private ArtifactStubFactory artifactStubFactory;
    // Prepares the stub artifact factory and makes sure its working directory
    // exists before each test.
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        artifactStubFactory = new DependencyArtifactStubFactory(getTestFile("target"), true, false);
        artifactStubFactory.getWorkingDir().mkdirs();
    }
    /** Resolves the plugin-config.xml of the named test project under src/test/resources/unit. */
    protected File getPluginXmlFile(String projectDirName) {
        return new File(getBasedir(), "src/test/resources/unit/" + projectDirName + "/plugin-config.xml");
    }
    /**
     * Looks up the "report" mojo from the given plugin configuration and wires
     * the session, repository session, reactor projects, remote repositories
     * and site directory it needs to execute in isolation.
     */
    protected SurefireReport createReportMojo(File pluginXmlFile) throws Exception {
        SurefireReport mojo = (SurefireReport) lookupMojo("report", pluginXmlFile);
        assertNotNull("Mojo not found.", mojo);
        LegacySupport legacySupport = lookup(LegacySupport.class);
        legacySupport.setSession(newMavenSession(new MavenProjectStub()));
        // Point the repository session's local repository at the stub
        // factory's working directory.
        DefaultRepositorySystemSession repoSession =
                (DefaultRepositorySystemSession) legacySupport.getRepositorySession();
        repoSession.setLocalRepositoryManager(new SimpleLocalRepositoryManagerFactory()
                .newInstance(repoSession, new LocalRepository(artifactStubFactory.getWorkingDir())));
        List<MavenProject> reactorProjects =
                mojo.getReactorProjects() != null ? mojo.getReactorProjects() : Collections.emptyList();
        setVariableValueToObject(mojo, "mojoExecution", getMockMojoExecution());
        // setVariableValueToObject(mojo, "session", legacySupport.getSession());
        setVariableValueToObject(mojo, "repoSession", legacySupport.getRepositorySession());
        setVariableValueToObject(mojo, "reactorProjects", reactorProjects);
        setVariableValueToObject(
                mojo, "remoteProjectRepositories", mojo.getProject().getRemoteProjectRepositories());
        setVariableValueToObject(
                mojo, "siteDirectory", new File(mojo.getProject().getBasedir(), "src/site"));
        return mojo;
    }
    // Verifies the mojo's configured parameters and that the generated report
    // contains the success icon when showSuccess is on (the default project).
    public void testBasicSurefireReport() throws Exception {
        File testPom = getPluginXmlFile("basic-surefire-report-test");
        SurefireReport mojo = createReportMojo(testPom);
        File outputDir = (File) getVariableValueFromObject(mojo, "outputDirectory");
        boolean showSuccess = (Boolean) getVariableValueFromObject(mojo, "showSuccess");
        File reportsDir = (File) getVariableValueFromObject(mojo, "reportsDirectory");
        String outputName = (String) getVariableValueFromObject(mojo, "outputName");
        File xrefTestLocation = (File) getVariableValueFromObject(mojo, "xrefTestLocation");
        boolean linkXRef = (Boolean) getVariableValueFromObject(mojo, "linkXRef");
        assertEquals(new File(getBasedir() + "/target/site/unit/basic-surefire-report-test"), outputDir);
        assertTrue(showSuccess);
        assertEquals(
                new File(getBasedir() + "/src/test/resources/unit/basic-surefire-report-test/surefire-reports")
                        .getAbsolutePath(),
                reportsDir.getAbsolutePath());
        assertEquals("surefire", outputName);
        assertEquals(
                new File(getBasedir() + "/target/site/unit/basic-surefire-report-test/xref-test").getAbsolutePath(),
                xrefTestLocation.getAbsolutePath());
        assertTrue(linkXRef);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/basic-surefire-report-test/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        // The success icon must be present because showSuccess is true.
        int idx = htmlContent.indexOf("images/icon_success_sml.gif");
        assertTrue(idx >= 0);
    }
    // Builds a minimal MojoExecution (goal "report" of this plugin) so the
    // mojo under test has an execution context.
    private MojoExecution getMockMojoExecution() {
        MojoDescriptor md = new MojoDescriptor();
        md.setGoal("report");
        MojoExecution me = new MojoExecution(md);
        PluginDescriptor pd = new PluginDescriptor();
        Plugin p = new Plugin();
        p.setGroupId("org.apache.maven.plugins");
        p.setArtifactId("maven-surefire-report-plugin");
        pd.setPlugin(p);
        md.setPluginDescriptor(pd);
        return me;
    }
    // With showSuccess=false the success icon must not appear in the report.
    public void testBasicSurefireReportIfShowSuccessIsFalse() throws Exception {
        File testPom = getPluginXmlFile("basic-surefire-report-success-false");
        SurefireReport mojo = createReportMojo(testPom);
        boolean showSuccess = (Boolean) getVariableValueFromObject(mojo, "showSuccess");
        assertFalse(showSuccess);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/basic-surefire-report-success-false/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        int idx = htmlContent.indexOf("images/icon_success_sml.gif");
        assertTrue(idx < 0);
    }
    // With linkXRef=false the report must not link into the xref-test pages.
    public void testBasicSurefireReportIfLinkXrefIsFalse() throws Exception {
        File testPom = getPluginXmlFile("basic-surefire-report-linkxref-false");
        SurefireReport mojo = createReportMojo(testPom);
        boolean linkXRef = (Boolean) getVariableValueFromObject(mojo, "linkXRef");
        assertFalse(linkXRef);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/basic-surefire-report-linkxref-false/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        int idx = htmlContent.indexOf("./xref-test/com/shape/CircleTest.html#L44");
        assertTrue(idx == -1);
    }
    // A project without a <reporting> section must still render, without xref
    // links.
    public void testBasicSurefireReportIfReportingIsNull() throws Exception {
        File testPom = getPluginXmlFile("basic-surefire-report-reporting-null");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/basic-surefire-report-reporting-null/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        int idx = htmlContent.indexOf("./xref-test/com/shape/CircleTest.html#L44");
        assertTrue(idx < 0);
    }
    // Test-case rows must carry "TC_..." anchors; failed cases additionally
    // link to their detail section.
    @SuppressWarnings("checkstyle:methodname")
    public void testBasicSurefireReport_AnchorTestCases() throws Exception {
        File testPom = getPluginXmlFile("basic-surefire-report-anchor-test-cases");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/basic-surefire-report-anchor-test-cases/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        int idx = htmlContent.indexOf("<td><a id=\"TC_com.shape.CircleTest.testX\"></a>testX</td>");
        assertTrue(idx > 0);
        idx = htmlContent.indexOf("<td><a id=\"TC_com.shape.CircleTest.testRadius\"></a>"
                + "<a href=\"#com.shape.CircleTest.testRadius\">testRadius</a>");
        assertTrue(idx > 0);
    }
    // A single errored test: summary tables, error icon, xref link and the
    // full (untrimmed) stack trace must all appear in the report.
    public void testSurefireReportSingleError() throws Exception {
        File testPom = getPluginXmlFile("surefire-report-single-error");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/surefire-report-single-error/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td><a href=\"#surefire\">surefire</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>"
                        + "<a href=\"#surefire.MyTest\">"
                        + "<img src=\"images/icon_error_sml.gif\" />"
                        + "</a>"
                        + "</td>\n"
                        + "<td><a href=\"#surefire.MyTest\">MyTest</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(htmlContent, containsString(">surefire.MyTest:13</a>"));
        assertThat(htmlContent, containsString("./xref-test/surefire/MyTest.html#L13"));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<pre>"
                        + "java.lang.RuntimeException: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:24)\n"
                        + "\tat surefire.MyTest.newRethrownDelegate(MyTest.java:17)\n"
                        + "\tat surefire.MyTest.test(MyTest.java:13)\n"
                        + "\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n"
                        + "\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\n"
                        + "\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n"
                        + "\tat java.lang.reflect.Method.invoke(Method.java:606)\n"
                        + "\tat org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)\n"
                        + "\tat org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)\n"
                        + "\tat org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)\n"
                        + "\tat org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)\n"
                        + "\tat org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)\n"
                        + "\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)\n"
                        + "\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)\n"
                        + "\tat org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)\n"
                        + "\tat org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)\n"
                        + "\tat org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)\n"
                        + "\tat org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)\n"
                        + "\tat org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)\n"
                        + "\tat org.junit.runners.ParentRunner.run(ParentRunner.java:363)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:272)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:167)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:147)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:130)\n"
                        + "\tat org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:211)\n"
                        + "\tat org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:163)\n"
                        + "\tat org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:105)\n"
                        + "\tCaused by: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.failure(MyTest.java:33)\n"
                        + "\tat surefire.MyTest.access$100(MyTest.java:9)\n"
                        + "\tat surefire.MyTest$Nested.run(MyTest.java:38)\n"
                        + "\tat surefire.MyTest.delegate(MyTest.java:29)\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:22)" + "</pre>")));
    }
    // Error raised from a nested class with trimStackTrace on: only the
    // trimmed trace (test frames plus cause) must be rendered.
    public void testSurefireReportNestedClassTrimStackTrace() throws Exception {
        File testPom = getPluginXmlFile("surefire-report-nestedClass-trimStackTrace");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report =
                new File(getBasedir(), "target/site/unit/surefire-report-nestedClass-trimStackTrace/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td><a href=\"#surefire\">surefire</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>"
                        + "<a href=\"#surefire.MyTest\">"
                        + "<img src=\"images/icon_error_sml.gif\" />"
                        + "</a>"
                        + "</td>\n"
                        + "<td><a href=\"#surefire.MyTest\">MyTest</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(htmlContent, containsString(">surefire.MyTest:13</a>"));
        assertThat(htmlContent, containsString("./xref-test/surefire/MyTest.html#L13"));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<pre>"
                        + "java.lang.RuntimeException: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:24)\n"
                        + "\tat surefire.MyTest.newRethrownDelegate(MyTest.java:17)\n"
                        + "\tat surefire.MyTest.test(MyTest.java:13)\n"
                        + "\tCaused by: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.failure(MyTest.java:33)\n"
                        + "\tat surefire.MyTest.access$100(MyTest.java:9)\n"
                        + "\tat surefire.MyTest$Nested.run(MyTest.java:38)\n"
                        + "\tat surefire.MyTest.delegate(MyTest.java:29)\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:22)"
                        + "</pre>")));
    }
    // Error raised from a nested class without trimming: the full stack trace
    // must be rendered.
    public void testSurefireReportNestedClass() throws Exception {
        File testPom = getPluginXmlFile("surefire-report-nestedClass");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/surefire-report-nestedClass/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td><a href=\"#surefire\">surefire</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>"
                        + "<a href=\"#surefire.MyTest\">"
                        + "<img src=\"images/icon_error_sml.gif\" />"
                        + "</a>"
                        + "</td>\n"
                        + "<td><a href=\"#surefire.MyTest\">MyTest</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(htmlContent, containsString(">surefire.MyTest:13</a>"));
        assertThat(htmlContent, containsString("./xref-test/surefire/MyTest.html#L13"));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<pre>"
                        + "java.lang.RuntimeException: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:24)\n"
                        + "\tat surefire.MyTest.newRethrownDelegate(MyTest.java:17)\n"
                        + "\tat surefire.MyTest.test(MyTest.java:13)\n"
                        + "\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n"
                        + "\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\n"
                        + "\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n"
                        + "\tat java.lang.reflect.Method.invoke(Method.java:606)\n"
                        + "\tat org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)\n"
                        + "\tat org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)\n"
                        + "\tat org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)\n"
                        + "\tat org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)\n"
                        + "\tat org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)\n"
                        + "\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)\n"
                        + "\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)\n"
                        + "\tat org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)\n"
                        + "\tat org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)\n"
                        + "\tat org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)\n"
                        + "\tat org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)\n"
                        + "\tat org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)\n"
                        + "\tat org.junit.runners.ParentRunner.run(ParentRunner.java:363)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:272)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:167)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:147)\n"
                        + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:130)\n"
                        + "\tat org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:211)\n"
                        + "\tat org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:163)\n"
                        + "\tat org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:105)\n"
                        + "\tCaused by: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.failure(MyTest.java:33)\n"
                        + "\tat surefire.MyTest.access$100(MyTest.java:9)\n"
                        + "\tat surefire.MyTest$Nested.run(MyTest.java:38)\n"
                        + "\tat surefire.MyTest.delegate(MyTest.java:29)\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:22)"
                        + "</pre>")));
    }
    // Error from an Enclosed-runner inner class with trimStackTrace on.
    public void testSurefireReportEnclosedTrimStackTrace() throws Exception {
        File testPom = getPluginXmlFile("surefire-report-enclosed-trimStackTrace");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/surefire-report-enclosed-trimStackTrace/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td><a href=\"#surefire\">surefire</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>"
                        + "<a href=\"#surefire.MyTest$A\">"
                        + "<img src=\"images/icon_error_sml.gif\" />"
                        + "</a>"
                        + "</td>\n"
                        + "<td><a href=\"#surefire.MyTest$A\">MyTest$A</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(htmlContent, containsString(">surefire.MyTest$A:45</a>"));
        assertThat(htmlContent, containsString("./xref-test/surefire/MyTest$A.html#L45"));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<pre>"
                        + "java.lang.RuntimeException: java.lang.IndexOutOfBoundsException\n"
                        + "\tat surefire.MyTest.failure(MyTest.java:33)\n"
                        + "\tat surefire.MyTest.access$100(MyTest.java:9)\n"
                        + "\tat surefire.MyTest$Nested.run(MyTest.java:38)\n"
                        + "\tat surefire.MyTest.delegate(MyTest.java:29)\n"
                        + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:22)\n"
                        + "\tat surefire.MyTest.newRethrownDelegate(MyTest.java:17)\n"
                        + "\tat surefire.MyTest.access$200(MyTest.java:9)\n"
                        + "\tat surefire.MyTest$A.t(MyTest.java:45)\n"
                        + "</pre>")));
    }
    // Error from an Enclosed-runner inner class without trimming: full trace.
    public void testSurefireReportEnclosed() throws Exception {
        File testPom = getPluginXmlFile("surefire-report-enclosed");
        SurefireReport mojo = createReportMojo(testPom);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/surefire-report-enclosed/surefire.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td><a href=\"#surefire\">surefire</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<tr class=\"b\">\n"
                        + "<td>"
                        + "<a href=\"#surefire.MyTest$A\">"
                        + "<img src=\"images/icon_error_sml.gif\" />"
                        + "</a>"
                        + "</td>\n"
                        + "<td><a href=\"#surefire.MyTest$A\">MyTest$A</a></td>\n"
                        + "<td>1</td>\n"
                        + "<td>1</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0</td>\n"
                        + "<td>0%</td>\n"
                        + "<td>0 s</td></tr>")));
        assertThat(htmlContent, containsString(">surefire.MyTest$A:45</a>"));
        assertThat(htmlContent, containsString("./xref-test/surefire/MyTest$A.html#L45"));
        assertThat(
                htmlContent,
                containsString(
                        toSystemNewLine("<pre>" + "java.lang.RuntimeException: java.lang.IndexOutOfBoundsException\n"
                                + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:24)\n"
                                + "\tat surefire.MyTest.newRethrownDelegate(MyTest.java:17)\n"
                                + "\tat surefire.MyTest.access$200(MyTest.java:9)\n"
                                + "\tat surefire.MyTest$A.t(MyTest.java:45)\n"
                                + "\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n"
                                + "\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\n"
                                + "\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n"
                                + "\tat java.lang.reflect.Method.invoke(Method.java:606)\n"
                                + "\tat org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)\n"
                                + "\tat org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)\n"
                                + "\tat org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)\n"
                                + "\tat org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)\n"
                                + "\tat org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)\n"
                                + "\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)\n"
                                + "\tat org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)\n"
                                + "\tat org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)\n"
                                + "\tat org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)\n"
                                + "\tat org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)\n"
                                + "\tat org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)\n"
                                + "\tat org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)\n"
                                + "\tat org.junit.runners.ParentRunner.run(ParentRunner.java:363)\n"
                                + "\tat org.junit.runners.Suite.runChild(Suite.java:128)\n"
                                + "\tat org.junit.runners.Suite.runChild(Suite.java:27)\n"
                                + "\tat org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)\n"
                                + "\tat org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)\n"
                                + "\tat org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)\n"
                                + "\tat org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)\n"
                                + "\tat org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)\n"
                                + "\tat org.junit.runners.ParentRunner.run(ParentRunner.java:363)\n"
                                + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:272)\n"
                                + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:167)\n"
                                + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:147)\n"
                                + "\tat org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:130)\n"
                                + "\tat org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:211)\n"
                                + "\tat org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:163)\n"
                                + "\tat org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:105)\n"
                                + "\tCaused by: java.lang.IndexOutOfBoundsException\n"
                                + "\tat surefire.MyTest.failure(MyTest.java:33)\n"
                                + "\tat surefire.MyTest.access$100(MyTest.java:9)\n"
                                + "\tat surefire.MyTest$Nested.run(MyTest.java:38)\n"
                                + "\tat surefire.MyTest.delegate(MyTest.java:29)\n"
                                + "\tat surefire.MyTest.rethrownDelegate(MyTest.java:22)\n"
                                + "</pre>")));
    }
    // SUREFIRE-1183: custom report title/description and output name must be
    // honored.
    public void testCustomTitleAndDescriptionReport() throws Exception {
        File testPom = getPluginXmlFile("surefire-1183");
        SurefireReport mojo = createReportMojo(testPom);
        File outputDir = (File) getVariableValueFromObject(mojo, "outputDirectory");
        String outputName = (String) getVariableValueFromObject(mojo, "outputName");
        File reportsDir = (File) getVariableValueFromObject(mojo, "reportsDirectory");
        assertEquals(new File(getBasedir() + "/target/site/unit/surefire-1183"), outputDir);
        assertEquals(
                new File(getBasedir() + "/src/test/resources/unit/surefire-1183/acceptancetest-reports")
                        .getAbsolutePath(),
                reportsDir.getAbsolutePath());
        assertEquals("acceptance-test", outputName);
        mojo.execute();
        File report = new File(getBasedir(), "target/site/unit/surefire-1183/acceptance-test.html");
        assertTrue(report.exists());
        String htmlContent = FileUtils.fileRead(report);
        assertThat(
                htmlContent,
                containsString(toSystemNewLine("<section><a id=\"Acceptance_Test\"></a>\n<h1>Acceptance Test</h1>")));
    }
}
|
apache/falcon | 35,175 | prism/src/main/java/org/apache/falcon/resource/proxy/InstanceManagerProxy.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.resource.proxy;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import org.apache.falcon.FalconException;
import org.apache.falcon.FalconRuntimException;
import org.apache.falcon.FalconWebException;
import org.apache.falcon.LifeCycle;
import org.apache.falcon.monitors.Dimension;
import org.apache.falcon.monitors.Monitored;
import org.apache.falcon.resource.APIResult;
import org.apache.falcon.resource.AbstractInstanceManager;
import org.apache.falcon.resource.FeedInstanceResult;
import org.apache.falcon.resource.InstanceDependencyResult;
import org.apache.falcon.resource.InstancesResult;
import org.apache.falcon.resource.InstancesSummaryResult;
import org.apache.falcon.resource.TriageResult;
import org.apache.falcon.resource.channel.Channel;
import org.apache.falcon.resource.channel.ChannelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A proxy implementation of the entity instance operations.
 * <p>
 * Each REST call is fanned out, via the {@link InstanceProxy} helper, to the
 * "ProcessInstanceManager" channel of every colo selected by the request's
 * colo expression; the per-colo results are then consolidated into a single
 * {@link APIResult} (a non-SUCCEEDED consolidated status is surfaced as a
 * {@link FalconWebException}).
 */
@Path("instance")
public class InstanceManagerProxy extends AbstractInstanceManager {
    private static final Logger LOG = LoggerFactory.getLogger(InstanceManagerProxy.class);
    // Channels keyed by colo name; populated eagerly in the constructor for all
    // known colos and lazily in getInstanceManager() for colos seen later.
    // NOTE(review): plain HashMap mutated lazily without synchronization — if
    // this JAX-RS resource is deployed as a shared singleton, concurrent first
    // requests for a new colo race on the map; confirm the resource lifecycle
    // before relying on this (consider a concurrent map otherwise).
    private final Map<String, Channel> processInstanceManagerChannels = new HashMap<String, Channel>();
    /**
     * Eagerly builds a channel per known colo so the first request does not
     * pay the channel-creation cost. Any failure is fatal for the resource.
     */
    public InstanceManagerProxy() {
        try {
            Set<String> colos = getAllColos();
            for (String colo : colos) {
                initializeFor(colo);
            }
        } catch (FalconException e) {
            throw new FalconRuntimException("Unable to initialize channels", e);
        }
    }
    // Creates and caches the "ProcessInstanceManager" channel for one colo.
    private void initializeFor(String colo) throws FalconException {
        processInstanceManagerChannels.put(colo, ChannelFactory.get("ProcessInstanceManager", colo));
    }
    // Returns the cached channel for the colo, creating it on first use.
    private Channel getInstanceManager(String colo) throws FalconException {
        if (!processInstanceManagerChannels.containsKey(colo)) {
            initializeFor(colo);
        }
        return processInstanceManagerChannels.get(colo);
    }
    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
    /**
     * Get a list of instances currently running for a given entity.
     * @param type Valid options are cluster, feed or process.
     * @param entity Name of the entity.
     * @param colo <optional param> Colo on which the query should be run.
     * @param lifeCycles <optional param> Valid lifecycles for feed are Eviction/Replication(default) and for process
     *                   is Execution(default).
     * @param filterBy <optional param> Filter results by list of field:value pairs. Example:
     *                 filterBy=CLUSTER:primary-cluster
     *                 Supported filter fields are CLUSTER, SOURCECLUSTER, STARTEDAFTER.
     *                 Query will do an AND among filterBy fields.
     * @param orderBy <optional param> Field by which results should be ordered
     *                Supports ordering by "status","startTime","endTime","cluster".
     * @param sortOrder <optional param> Valid options are "asc" and "desc"
     * @param offset <optional param> Show results from the offset, used for pagination. Defaults to 0.
     * @param numResults <optional param> Number of results to show per request, used for pagination.
     *                   Only integers > 0 are valid, Default is 10.
     * @return List of instances currently running.
     */
    @GET
    @Path("running/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "running")
    @Override
    public InstancesResult getRunningInstances(
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("colo") @QueryParam("colo") String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles,
            @DefaultValue("") @QueryParam("filterBy") final String filterBy,
            @DefaultValue("") @QueryParam("orderBy") final String orderBy,
            @DefaultValue("") @QueryParam("sortOrder") final String sortOrder,
            @DefaultValue("0") @QueryParam("offset") final Integer offset,
            @QueryParam("numResults") final Integer numResults) {
        // Server-side default applies only when the client omitted numResults.
        final Integer resultsPerPage = numResults == null ? getDefaultResultsPerPage() : numResults;
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).
                        invoke("getRunningInstances", type, entity, colo, lifeCycles,
                                filterBy, orderBy, sortOrder, offset, resultsPerPage);
            }
        }.execute(colo, type, entity);
    }
    /*
       getStatus(...) method actually gets all instances, filtered by a specific status. This is
       a better named API which achieves the same result
     */
    /**
     * Get list of all instances of a given entity.
     * @param type Valid options are cluster, feed or process.
     * @param entity Name of the entity.
     * @param startStr <optional param> Show instances from this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *                 By default, it is set to (end - (10 * entityFrequency)).
     * @param endStr <optional param> Show instances up to this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *               Default is set to now.
     * @param colo <optional param> Colo on which the query should be run.
     * @param lifeCycles <optional param> Valid lifecycles for feed are Eviction/Replication(default) and for process
     *                   is Execution(default).
     * @param filterBy <optional param> Filter results by list of field:value pairs. Example:
     *                 filterBy=STATUS:RUNNING,CLUSTER:primary-cluster
     *                 Supported filter fields are STATUS, CLUSTER, SOURCECLUSTER, STARTEDAFTER.
     *                 Query will do an AND among filterBy fields.
     * @param orderBy <optional param> Field by which results should be ordered.
     *                Supports ordering by "status","startTime","endTime","cluster".
     * @param sortOrder <optional param> Valid options are "asc" and "desc"
     * @param offset <optional param> Show results from the offset, used for pagination. Defaults to 0.
     * @param numResults <optional param> Number of results to show per request, used for pagination.
     *                   Only integers > 0 are valid, Default is 10.
     * @return List of instances of given entity
     */
    @GET
    @Path("list/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-list")
    @Override
    public InstancesResult getInstances(
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") final String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles,
            @DefaultValue("") @QueryParam("filterBy") final String filterBy,
            @DefaultValue("") @QueryParam("orderBy") final String orderBy,
            @DefaultValue("") @QueryParam("sortOrder") final String sortOrder,
            @DefaultValue("0") @QueryParam("offset") final Integer offset,
            @QueryParam("numResults") Integer numResults,
            @Dimension("allAttempts") @QueryParam("allAttempts") final Boolean allAttempts) {
        final Integer resultsPerPage = numResults == null ? getDefaultResultsPerPage() : numResults;
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("getInstances",
                        type, entity, startStr, endStr, colo, lifeCycles,
                        filterBy, orderBy, sortOrder, offset, resultsPerPage, allAttempts);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Get status of a specific instance of an entity.
     * @param type Valid options are cluster, feed or process.
     * @param entity Name of the entity.
     * @param startStr <optional param> Show instances from this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *                 By default, it is set to (end - (10 * entityFrequency)).
     * @param endStr <optional param> Show instances up to this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *               Default is set to now.
     * @param colo <optional param> Colo on which the query should be run.
     * @param lifeCycles <optional param> Valid lifecycles for feed are Eviction/Replication(default) and for process
     *                   is Execution(default).
     * @param filterBy <optional param> Filter results by list of field:value pairs. Example:
     *                 filterBy=STATUS:RUNNING,CLUSTER:primary-cluster
     *                 Supported filter fields are STATUS, CLUSTER, SOURCECLUSTER, STARTEDAFTER.
     *                 Query will do an AND among filterBy fields.
     * @param orderBy <optional param> Field by which results should be ordered.
     *                Supports ordering by "status","startTime","endTime","cluster".
     * @param sortOrder <optional param> Valid options are "asc" and "desc"
     * @param offset <optional param> Show results from the offset, used for pagination. Defaults to 0.
     * @param numResults <optional param> Number of results to show per request, used for pagination.
     *                   Only integers > 0 are valid, Default is 10.
     * @return Status of the specified instance along with job urls for all actions of user workflow and non-succeeded
     *         actions of the main-workflow.
     */
    @GET
    @Path("status/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-status")
    @Override
    public InstancesResult getStatus(
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") final String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles,
            @DefaultValue("") @QueryParam("filterBy") final String filterBy,
            @DefaultValue("") @QueryParam("orderBy") final String orderBy,
            @DefaultValue("") @QueryParam("sortOrder") final String sortOrder,
            @DefaultValue("0") @QueryParam("offset") final Integer offset,
            @QueryParam("numResults") final Integer numResults,
            @Dimension("allAttempts") @QueryParam("allAttempts") final Boolean allAttempts) {
        final Integer resultsPerPage = numResults == null ? getDefaultResultsPerPage() : numResults;
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("getStatus",
                        type, entity, startStr, endStr, colo, lifeCycles,
                        filterBy, orderBy, sortOrder, offset, resultsPerPage, allAttempts);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Get summary of instance/instances of an entity.
     * @param type Valid options are cluster, feed or process.
     * @param entity Name of the entity.
     * @param startStr <optional param> Show instances from this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *                 By default, it is set to (end - (10 * entityFrequency)).
     * @param endStr <optional param> Show instances up to this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *               Default is set to now.
     * @param colo <optional param> Colo on which the query should be run.
     * @param lifeCycles <optional param> Valid lifecycles for feed are Eviction/Replication(default) and for process
     *                   is Execution(default).
     * @param filterBy <optional param> Filter results by list of field:value pairs.
     *                 Example1: filterBy=STATUS:RUNNING,CLUSTER:primary-cluster
     *                 Example2: filterBy=Status:RUNNING,Status:KILLED
     *                 Supported filter fields are STATUS, CLUSTER.
     *                 Query will do an AND among filterBy fields.
     * @param orderBy <optional param> Field by which results should be ordered.
     *                Supports ordering by "cluster". Example: orderBy=cluster
     * @param sortOrder <optional param> Valid options are "asc" and "desc". Example: sortOrder=asc
     * @return Summary of the instances over the specified time range
     */
    @GET
    @Path("summary/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-summary")
    @Override
    public InstancesSummaryResult getSummary(
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") final String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles,
            @DefaultValue("") @QueryParam("filterBy") final String filterBy,
            @DefaultValue("") @QueryParam("orderBy") final String orderBy,
            @DefaultValue("") @QueryParam("sortOrder") final String sortOrder) {
        return new InstanceProxy<InstancesSummaryResult>(InstancesSummaryResult.class) {
            @Override
            protected InstancesSummaryResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("getSummary",
                        type, entity, startStr, endStr, colo, lifeCycles,
                        filterBy, orderBy, sortOrder);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Get falcon feed instance availability.
     * @param type Valid options is feed.
     * @param entity Name of the entity.
     * @param start <optional param> Show instances from this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *              By default, it is set to (end - (10 * entityFrequency)).
     * @param end <optional param> Show instances up to this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *            Default is set to now.
     * @param colo Colo on which the query should be run.
     * @return Feed instance availability status
     */
    @GET
    @Path("listing/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-listing")
    @Override
    public FeedInstanceResult getListing(
            @Dimension("type") @PathParam("type") final String type,
            @Dimension("entity") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String start,
            @Dimension("end-time") @QueryParam("end") final String end,
            @Dimension("colo") @QueryParam("colo") String colo) {
        return new InstanceProxy<FeedInstanceResult>(FeedInstanceResult.class) {
            @Override
            protected FeedInstanceResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("getListing",
                        type, entity, start, end, colo);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Get the params passed to the workflow for an instance of feed/process.
     * @param type Valid options are cluster, feed or process.
     * @param entity Name of the entity.
     * @param start should be the nominal time of the instance for which you want the params to be returned
     * @param colo <optional param> Colo on which the query should be run.
     * @param lifeCycles <optional param> Valid lifecycles for feed are Eviction/Replication(default) and for process is
     *                   Execution(default).
     * @return List of instances currently running.
     */
    @GET
    @Path("params/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-params")
    @Override
    public InstancesResult getInstanceParams(
            @Dimension("type") @PathParam("type") final String type,
            @Dimension("entity") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String start,
            @Dimension("colo") @QueryParam("colo") String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles) {
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("getInstanceParams",
                        type, entity, start, colo, lifeCycles);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Get log of a specific instance of an entity.
     * @param type Valid options are cluster, feed or process.
     * @param entity Name of the entity.
     * @param startStr <optional param> Show instances from this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *                 By default, it is set to (end - (10 * entityFrequency)).
     * @param endStr <optional param> Show instances up to this date. Date format is yyyy-MM-dd'T'HH:mm'Z'.
     *               Default is set to now.
     * @param colo <optional param> Colo on which the query should be run.
     * @param runId <optional param> Run Id.
     * @param lifeCycles <optional param> Valid lifecycles for feed are Eviction/Replication(default) and for process is
     *                   Execution(default).
     * @param filterBy <optional param> Filter results by list of field:value pairs.
     *                 Example: filterBy=STATUS:RUNNING,CLUSTER:primary-cluster
     *                 Supported filter fields are STATUS, CLUSTER, SOURCECLUSTER, STARTEDAFTER.
     *                 Query will do an AND among filterBy fields.
     * @param orderBy <optional param> Field by which results should be ordered.
     *                Supports ordering by "status","startTime","endTime","cluster".
     * @param sortOrder <optional param> Valid options are "asc" and "desc"
     * @param offset <optional param> Show results from the offset, used for pagination. Defaults to 0.
     * @param numResults <optional param> Number of results to show per request, used for pagination. Only integers > 0
     *                   are valid, Default is 10.
     * @return Log of specified instance.
     */
    @GET
    @Path("logs/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-logs")
    @Override
    public InstancesResult getLogs(
            @Dimension("type") @PathParam("type") final String type,
            @Dimension("entity") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") final String colo,
            @Dimension("run-id") @QueryParam("runid") final String runId,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles,
            @DefaultValue("") @QueryParam("filterBy") final String filterBy,
            @DefaultValue("") @QueryParam("orderBy") final String orderBy,
            @DefaultValue("") @QueryParam("sortOrder") final String sortOrder,
            @DefaultValue("0") @QueryParam("offset") final Integer offset,
            @QueryParam("numResults") final Integer numResults) {
        final Integer resultsPerPage = numResults == null ? getDefaultResultsPerPage() : numResults;
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("getLogs",
                        type, entity, startStr, endStr, colo, runId, lifeCycles,
                        filterBy, orderBy, sortOrder, offset, resultsPerPage);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Kill currently running instance(s) of an entity.
     * @param request Servlet Request
     * @param type Valid options are feed or process.
     * @param entity name of the entity.
     * @param startStr start time of the instance(s) that you want to refer to
     * @param endStr end time of the instance(s) that you want to refer to
     * @param colo Colo on which the query should be run.
     * @param lifeCycles <optional param> can be Eviction/Replication(default) for feed and Execution(default) for
     *                   process.
     * @return Result of the kill operation.
     */
    @POST
    @Path("kill/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "kill-instance")
    @Override
    public InstancesResult killInstance(
            @Context HttpServletRequest request,
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") final String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles) {
        // Buffer the request body so it can be replayed once per target colo.
        final HttpServletRequest bufferedRequest = new BufferedRequest(request);
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("killInstance",
                        bufferedRequest, type, entity, startStr, endStr, colo, lifeCycles);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Suspend instances of an entity.
     * @param request Servlet Request
     * @param type Valid options are feed or process.
     * @param entity name of the entity.
     * @param startStr the start time of the instance(s) that you want to refer to
     * @param endStr the end time of the instance(s) that you want to refer to
     * @param colo Colo on which the query should be run.
     * @param lifeCycles <optional param> can be Eviction/Replication(default) for feed and Execution(default) for
     *                   process.
     * @return Results of the suspend command.
     */
    @POST
    @Path("suspend/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "suspend-instance")
    @Override
    public InstancesResult suspendInstance(
            @Context HttpServletRequest request,
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles) {
        // Buffer the request body so it can be replayed once per target colo.
        final HttpServletRequest bufferedRequest = new BufferedRequest(request);
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("suspendInstance",
                        bufferedRequest, type, entity, startStr, endStr, colo, lifeCycles);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Resume suspended instances of an entity.
     * @param request Servlet Request
     * @param type Valid options are feed or process.
     * @param entity name of the entity.
     * @param startStr start time of the instance(s) that you want to refer to
     * @param endStr the end time of the instance(s) that you want to refer to
     * @param colo Colo on which the query should be run.
     * @param lifeCycles <optional param> can be Eviction/Replication(default) for feed and Execution(default) for
     *                   process.
     * @return Results of the resume command.
     */
    @POST
    @Path("resume/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "resume-instance")
    @Override
    public InstancesResult resumeInstance(
            @Context HttpServletRequest request,
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Dimension("colo") @QueryParam("colo") String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles) {
        // Buffer the request body so it can be replayed once per target colo.
        final HttpServletRequest bufferedRequest = new BufferedRequest(request);
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("resumeInstance",
                        bufferedRequest, type, entity, startStr, endStr, colo, lifeCycles);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Rerun instances of an entity. On issuing a rerun, by default the execution resumes from the last failed node in
     * the workflow.
     * @param type Valid options are feed or process.
     * @param entity name of the entity.
     * @param startStr start is the start time of the instance that you want to refer to
     * @param endStr end is the end time of the instance that you want to refer to
     * @param request Servlet Request
     * @param colo Colo on which the query should be run.
     * @param lifeCycles <optional param> can be Eviction/Replication(default) for feed and Execution(default) for
     *                   process.
     * @param isForced <optional param> can be used to forcefully rerun the entire instance.
     * @return Results of the rerun command.
     */
    @POST
    @Path("rerun/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "re-run-instance")
    @Override
    public InstancesResult reRunInstance(
            @Dimension("entityType") @PathParam("type") final String type,
            @Dimension("entityName") @PathParam("entity") final String entity,
            @Dimension("start-time") @QueryParam("start") final String startStr,
            @Dimension("end-time") @QueryParam("end") final String endStr,
            @Context HttpServletRequest request,
            @Dimension("colo") @QueryParam("colo") String colo,
            @Dimension("lifecycle") @QueryParam("lifecycle") final List<LifeCycle> lifeCycles,
            @Dimension("force") @QueryParam("force") final Boolean isForced) {
        // Buffer the request body so it can be replayed once per target colo.
        final HttpServletRequest bufferedRequest = new BufferedRequest(request);
        return new InstanceProxy<InstancesResult>(InstancesResult.class) {
            @Override
            protected InstancesResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("reRunInstance",
                        type, entity, startStr, endStr, bufferedRequest, colo, lifeCycles, isForced);
            }
        }.execute(colo, type, entity);
    }
    /**
     * Get dependent instances for a particular instance.
     * @param entityType Valid options are feed or process.
     * @param entityName Name of the entity
     * @param instanceTimeStr <mandatory param> time of the given instance
     * @param colo Colo on which the query should be run.
     * @return Dependent instances for the specified instance
     */
    @GET
    @Path("dependencies/{type}/{entity}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-dependency")
    public InstanceDependencyResult instanceDependencies(
            @Dimension("type") @PathParam("type") final String entityType,
            @Dimension("entityName") @PathParam("entity") final String entityName,
            @Dimension("instanceTime") @QueryParam("instanceTime") final String instanceTimeStr,
            @Dimension("colo") @QueryParam("colo") String colo) {
        return new InstanceProxy<InstanceDependencyResult>(InstanceDependencyResult.class) {
            @Override
            protected InstanceDependencyResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("instanceDependencies",
                        entityType, entityName, instanceTimeStr, colo);
            }
        }.execute(colo, entityType, entityName);
    }
    /**
     * Triage an instance: walk its lineage to locate the root cause of failure.
     * @param entityType type of the entity. Only feed and process are valid entity types for triage.
     * @param entityName name of the entity.
     * @param instanceTime time of the instance which should be used to triage.
     * @param colo Colo on which the query should be run.
     * @return It returns a json graph
     */
    @GET
    @Path("triage/{type}/{name}")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "triage-instance")
    @Override
    public TriageResult triageInstance(
            @Dimension("type") @PathParam("type") final String entityType,
            @Dimension("name") @PathParam("name") final String entityName,
            @Dimension("instanceTime") @QueryParam("start") final String instanceTime,
            @Dimension("colo") @QueryParam("colo") String colo) {
        return new InstanceProxy<TriageResult>(TriageResult.class) {
            @Override
            protected TriageResult doExecute(String colo) throws FalconException {
                return getInstanceManager(colo).invoke("triageInstance", entityType, entityName, instanceTime, colo);
            }
        }.execute(colo, entityType, entityName);
    }
    /**
     * Search instances by type, name subsequence, tag keywords, nominal time range and status.
     * Unlike the other endpoints, this is served locally by the superclass rather than
     * fanned out through colo channels.
     * @param type <optional param> entity type to restrict the search to.
     * @param nameSubsequence <optional param> subsequence to match against entity names.
     * @param tagKeywords <optional param> keywords to match against entity tags.
     * @param nominalStartTime <optional param> lower bound of nominal time.
     * @param nominalEndTime <optional param> upper bound of nominal time.
     * @param status <optional param> instance status to filter by.
     * @param orderBy <optional param> field by which results should be ordered.
     * @param offset <optional param> pagination offset, defaults to 0.
     * @param resultsPerPage <optional param> page size; null means server default.
     * @return Matching instances.
     */
    @GET
    @Path("search")
    @Produces(MediaType.APPLICATION_JSON)
    @Monitored(event = "instance-search")
    @Override
    public InstancesResult searchInstances(
            @DefaultValue("") @QueryParam("type") String type,
            @DefaultValue("") @QueryParam("nameseq") String nameSubsequence,
            @DefaultValue("") @QueryParam("tagkeys") String tagKeywords,
            @DefaultValue("") @QueryParam("start") String nominalStartTime,
            @DefaultValue("") @QueryParam("end") String nominalEndTime,
            @DefaultValue("") @QueryParam("instanceStatus") String status,
            @DefaultValue("") @QueryParam("orderBy") String orderBy,
            @DefaultValue("0") @QueryParam("offset") Integer offset,
            @QueryParam("numResults") Integer resultsPerPage) {
        return super.searchInstances(type, nameSubsequence, tagKeywords, nominalStartTime, nominalEndTime,
                status, orderBy, offset, resultsPerPage);
    }
    //RESUME CHECKSTYLE CHECK ParameterNumberCheck
    /**
     * Template for fanning one operation out to every colo matched by the
     * request's colo expression and consolidating the per-colo results.
     * Subclasses implement {@link #doExecute(String)} for a single colo; a
     * failure in one colo is recorded as a FAILED result for that colo rather
     * than aborting the remaining colos.
     *
     * @param <T> concrete result type; must expose a (Status, String) constructor,
     *            which is used reflectively to build per-colo failure results.
     */
    private abstract class InstanceProxy<T extends APIResult> {
        private final Class<T> clazz;
        public InstanceProxy(Class<T> resultClazz) {
            this.clazz = resultClazz;
        }
        /**
         * Runs the operation against each colo resolved from {@code coloExpr}
         * and consolidates the results; throws a FalconWebException when the
         * consolidated status is not SUCCEEDED.
         */
        public T execute(String coloExpr, String type, String name) {
            Set<String> colos = getColosFromExpression(coloExpr, type, name);
            Map<String, T> results = new HashMap<String, T>();
            for (String colo : colos) {
                try {
                    T resultHolder = doExecute(colo);
                    results.put(colo, resultHolder);
                } catch (FalconWebException e){
                    // Remote side reported an API-level failure; keep its message.
                    APIResult result = (APIResult)e.getResponse().getEntity();
                    results.put(colo, getResultInstance(APIResult.Status.FAILED, result.getMessage()));
                } catch (Throwable e) {
                    // Unexpected failure for this colo only; other colos still proceed.
                    LOG.error("Failed to fetch results for colo:{}", colo, e);
                    results.put(colo, getResultInstance(APIResult.Status.FAILED,
                            e.getClass().getName() + "::" + e.getMessage()));
                }
            }
            T finalResult = consolidateResult(results, clazz);
            if (finalResult.getStatus() != APIResult.Status.SUCCEEDED) {
                throw FalconWebException.newAPIException(finalResult.getMessage());
            } else {
                return finalResult;
            }
        }
        /** Executes the operation against a single colo. */
        protected abstract T doExecute(String colo) throws FalconException;
        // Builds a result of type T reflectively via its (Status, String) constructor.
        private T getResultInstance(APIResult.Status status, String message) {
            try {
                Constructor<T> constructor = clazz.getConstructor(APIResult.Status.class, String.class);
                return constructor.newInstance(status, message);
            } catch (Exception e) {
                throw new FalconRuntimException("Unable to consolidate result.", e);
            }
        }
    }
}
|
apache/ranger | 35,432 | agents-common/src/main/java/org/apache/ranger/plugin/policyresourcematcher/RangerDefaultPolicyResourceMatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ranger.plugin.policyresourcematcher;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyResource;
import org.apache.ranger.plugin.model.RangerServiceDef;
import org.apache.ranger.plugin.model.RangerServiceDef.RangerResourceDef;
import org.apache.ranger.plugin.model.validation.RangerServiceDefHelper;
import org.apache.ranger.plugin.policyengine.RangerAccessRequest.ResourceElementMatchingScope;
import org.apache.ranger.plugin.policyengine.RangerAccessResource;
import org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl;
import org.apache.ranger.plugin.policyengine.RangerPluginContext;
import org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher;
import org.apache.ranger.plugin.resourcematcher.RangerResourceMatcher;
import org.apache.ranger.plugin.util.RangerPerfTracer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.ranger.plugin.resourcematcher.RangerAbstractResourceMatcher.OPTION_WILD_CARD;
public class RangerDefaultPolicyResourceMatcher implements RangerPolicyResourceMatcher {
private static final Logger LOG = LoggerFactory.getLogger(RangerDefaultPolicyResourceMatcher.class);
private static final Logger PERF_POLICY_RESOURCE_MATCHER_INIT_LOG = RangerPerfTracer.getPerfLogger("policyresourcematcher.init");
private static final Logger PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG = RangerPerfTracer.getPerfLogger("policyresourcematcher.match");
private final boolean forceEnableWildcardMatch;
protected RangerServiceDef serviceDef;
protected int policyType;
protected Map<String, RangerPolicyResource> policyResources;
private Map<String, RangerResourceMatcher> allMatchers;
private boolean needsDynamicEval;
private List<RangerResourceDef> validResourceHierarchy;
private boolean isInitialized;
private RangerServiceDefHelper serviceDefHelper;
private RangerPluginContext pluginContext;
public RangerDefaultPolicyResourceMatcher() {
this.forceEnableWildcardMatch = false;
}
/**
 * Creates a matcher.
 *
 * @param forceEnableWildcardMatch when true, createResourceMatcher() swaps in a
 *        wildcard-enabled resource-def even if the original def disables wildcards
 */
public RangerDefaultPolicyResourceMatcher(boolean forceEnableWildcardMatch) {
    this.forceEnableWildcardMatch = forceEnableWildcardMatch;
}
/**
 * Checks that the resource levels present in {@code resources} form a contiguous
 * prefix of {@code hierarchy}: once a level is absent, no deeper level may be
 * present (no "gaps"). A null hierarchy is never valid.
 */
public static boolean isHierarchyValidForResources(List<RangerResourceDef> hierarchy, Map<String, ?> resources) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> isHierarchyValidForResources({})", StringUtils.join(hierarchy, ","));
    }
    boolean ret;
    if (hierarchy == null) {
        ret = false;
    } else {
        ret = true;
        boolean sawMissingLevel = false;
        for (RangerResourceDef level : hierarchy) {
            boolean levelPresent = resources.get(level.getName()) != null;
            if (!levelPresent) {
                sawMissingLevel = true;
            } else if (sawMissingLevel) {
                // a deeper level is present after a gap => invalid
                ret = false;
                break;
            }
        }
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== isHierarchyValidForResources({}) : {}", StringUtils.join(hierarchy, ","), ret);
    }
    return ret;
}
/** Returns the policy type (access/datamask/rowfilter/...) this matcher was built for. */
public int getPolicyType() {
    return policyType;
}
/** Returns the service-def helper; may be null until init() builds one or a caller sets it. */
public RangerServiceDefHelper getServiceDefHelper() {
    return serviceDefHelper;
}
@Override
public void setServiceDefHelper(RangerServiceDefHelper serviceDefHelper) {
    // Allows sharing a pre-built helper instead of letting init() create one.
    this.serviceDefHelper = serviceDefHelper;
}
@Override
public void setPluginContext(RangerPluginContext pluginContext) {
    // Optional; when present, createResourceMatcher() uses it as a matcher cache.
    this.pluginContext = pluginContext;
}
@Override
public RangerServiceDef getServiceDef() {
    return serviceDef;
}
@Override
public void setServiceDef(RangerServiceDef serviceDef) {
    // Changing the service-def invalidates matchers built by init(); warn the
    // caller that init() must be run again.
    if (isInitialized) {
        LOG.warn("RangerDefaultPolicyResourceMatcher is already initialized. init() must be done again after updating serviceDef");
    }
    this.serviceDef = serviceDef;
}
/**
 * Returns the matcher built by init() for the given resource name, or null when
 * no matcher exists (including before a successful init()).
 */
@Override
public RangerResourceMatcher getResourceMatcher(String resourceName) {
    if (allMatchers == null) {
        return null;
    }
    return allMatchers.get(resourceName);
}
/** Convenience overload: matches with no per-resource matching scopes. */
@Override
public boolean isMatch(RangerAccessResource resource, Map<String, Object> evalContext) {
    return isMatch(resource, Collections.emptyMap(), evalContext);
}
/**
 * Matches an access resource against this policy's resources by converting the
 * resource's String values into a RangerPolicyResource map and delegating to the
 * map-based isMatch(). Any non-String, non-null resource value makes the whole
 * match fail.
 */
@Override
public boolean isMatch(RangerAccessResource resource, Map<String, ResourceElementMatchingScope> scopes, Map<String, Object> evalContext) {
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG)) {
        // NOTE(review): perf tag reads "grantRevokeMatch()" although this is isMatch(); kept as-is.
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG, "RangerDefaultPolicyResourceMatcher.grantRevokeMatch()");
    }
    /*
     * There is already API to get the delegateAdmin permissions for a map of policyResources.
     * That implementation should be reused for figuring out delegateAdmin permissions for a resource as well.
     */
    Map<String, RangerPolicyResource> policyResources = null;
    for (RangerResourceDef resourceDef : serviceDef.getResources()) {
        String resourceName = resourceDef.getName();
        Object resourceValue = resource.getValue(resourceName);
        if (resourceValue instanceof String) {
            String strValue = (String) resourceValue;
            if (policyResources == null) {
                policyResources = new HashMap<>();
            }
            policyResources.put(resourceName, new RangerPolicyResource(strValue));
        } else if (resourceValue != null) { // return false for any other type of resourceValue
            policyResources = null;
            break;
        }
    }
    final boolean ret = MapUtils.isNotEmpty(policyResources) && isMatch(policyResources, scopes, evalContext);
    RangerPerfTracer.log(perf);
    return ret;
}
/** Convenience overload: matches a resource map with no per-resource matching scopes. */
@Override
public boolean isMatch(Map<String, RangerPolicyResource> resources, Map<String, Object> evalContext) {
    return isMatch(resources, Collections.emptyMap(), evalContext);
}
/**
 * Matches a map of policy-resources (e.g. from a grant/revoke or delegate-admin
 * request) against this policy. The request's resource keys must be a subset of
 * this policy's keys; then every value for every level must be accepted by the
 * corresponding matcher. A level with no matcher matches only when the request
 * supplies no values for it.
 */
@Override
public boolean isMatch(Map<String, RangerPolicyResource> resources, Map<String, ResourceElementMatchingScope> scopes, Map<String, Object> evalContext) {
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.isMatch({}, {})", resources, evalContext);
    boolean ret = false;
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG, "RangerDefaultPolicyResourceMatcher.delegateAdminMatch()");
    }
    if (serviceDef != null && serviceDef.getResources() != null) {
        Collection<String> resourceKeys = resources == null ? null : resources.keySet();
        Collection<String> policyKeys = policyResources == null ? null : policyResources.keySet();
        // An empty request matches vacuously; otherwise the policy must cover every requested key.
        boolean keysMatch = CollectionUtils.isEmpty(resourceKeys) || (policyKeys != null && policyKeys.containsAll(resourceKeys));
        if (keysMatch) {
            for (RangerResourceDef resourceDef : serviceDef.getResources()) {
                String resourceName = resourceDef.getName();
                RangerPolicyResource resourceValues = resources == null ? null : resources.get(resourceName);
                List<String> values = resourceValues == null ? null : resourceValues.getValues();
                RangerResourceMatcher matcher = allMatchers == null ? null : allMatchers.get(resourceName);
                if (matcher != null) {
                    if (CollectionUtils.isNotEmpty(values)) {
                        // every requested value at this level must match
                        for (String value : values) {
                            ret = matcher.isMatch(value, scopes.get(resourceName), evalContext);
                            if (!ret) {
                                break;
                            }
                        }
                    } else {
                        // no value requested at this level: only a match-any matcher accepts
                        ret = matcher.isMatchAny();
                    }
                } else {
                    ret = CollectionUtils.isEmpty(values);
                }
                if (!ret) {
                    break;
                }
            }
        } else {
            LOG.debug("isMatch(): keysMatch=false. resourceKeys={}; policyKeys={}", resourceKeys, policyKeys);
        }
    }
    RangerPerfTracer.log(perf);
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.isMatch({}, {}): {}", resources, evalContext, ret);
    return ret;
}
/** Convenience overload: scope-based match with no per-resource matching scopes. */
@Override
public boolean isMatch(RangerAccessResource resource, MatchScope scope, Map<String, Object> evalContext) {
    return isMatch(resource, Collections.emptyMap(), scope, evalContext);
}
/**
 * Computes how the resource relates to this policy (SELF/ANCESTOR/DESCENDANT/...)
 * and tests that relation against the requested scope.
 */
@Override
public boolean isMatch(RangerAccessResource resource, Map<String, ResourceElementMatchingScope> scopes, MatchScope scope, Map<String, Object> evalContext) {
    return isMatch(scope, getMatchType(resource, scopes, evalContext));
}
/** Convenience overload: policy-to-policy match with no per-resource matching scopes. */
@Override
public boolean isMatch(RangerPolicy policy, MatchScope scope, Map<String, Object> evalContext) {
    return isMatch(policy, Collections.emptyMap(), scope, evalContext);
}
/**
 * Matches another policy's resources against this matcher: for each level of the
 * (shortest) hierarchy covering the other policy's keys, at least one of that
 * policy's values must match; the final match-type is then tested against the
 * requested scope. Only policies of the same policy-type are considered.
 */
@Override
public boolean isMatch(RangerPolicy policy, Map<String, ResourceElementMatchingScope> scopes, MatchScope scope, Map<String, Object> evalContext) {
    boolean ret = false;
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG, "RangerDefaultPolicyResourceMatcher.getPoliciesNonLegacy()");
    }
    Map<String, RangerPolicyResource> resources = policy.getResources();
    if (policy.getPolicyType() == policyType && MapUtils.isNotEmpty(resources)) {
        List<RangerResourceDef> hierarchy = getMatchingHierarchy(resources.keySet());
        if (CollectionUtils.isNotEmpty(hierarchy)) {
            MatchType matchType = MatchType.NONE;
            RangerAccessResourceImpl accessResource = new RangerAccessResourceImpl();
            accessResource.setServiceDef(serviceDef);
            // Build up accessResource resourceDef by resourceDef.
            // For each resourceDef,
            // examine policy-values one by one.
            // The first value that is acceptable, that is,
            // value matches in any way, is used for that resourceDef, and
            // next resourceDef is processed.
            // If none of the values matches, the policy as a whole definitely will not match,
            // therefore, the match is failed
            // After all resourceDefs are processed, and some match is achieved at every
            // level, the final matchType (which is for the entire policy) is checked against
            // requested scope to determine the match-result.
            // Unit tests in TestDefaultPolicyResourceForPolicy.java, TestDefaultPolicyResourceMatcher.java
            // test_defaultpolicyresourcematcher_for_hdfs_policy.json, and
            // test_defaultpolicyresourcematcher_for_hive_policy.json, and
            // test_defaultPolicyResourceMatcher.json
            boolean skipped = false;
            for (RangerResourceDef resourceDef : hierarchy) {
                String name = resourceDef.getName();
                RangerPolicyResource policyResource = resources.get(name);
                if (policyResource != null && CollectionUtils.isNotEmpty(policyResource.getValues())) {
                    ret = false;
                    matchType = MatchType.NONE;
                    // 'skipped' set means an earlier level had no values; values at a
                    // deeper level after a gap invalidate the match.
                    if (!skipped) {
                        for (String value : policyResource.getValues()) {
                            accessResource.setValue(name, value);
                            matchType = getMatchType(accessResource, scopes, evalContext);
                            if (matchType != MatchType.NONE) { // One value for this resourceDef matched
                                ret = true;
                                break;
                            }
                        }
                    } else {
                        break;
                    }
                } else {
                    skipped = true;
                }
                if (!ret) { // None of the values specified for this resourceDef matched, no point in continuing with next resourceDef
                    break;
                }
            }
            ret = ret && isMatch(scope, matchType);
        }
    }
    RangerPerfTracer.log(perf);
    return ret;
}
/** Convenience overload: computes the match-type with no per-resource matching scopes. */
@Override
public MatchType getMatchType(RangerAccessResource resource, Map<String, Object> evalContext) {
    return getMatchType(resource, Collections.emptyMap(), evalContext);
}
/**
 * Determines how the given resource relates to this policy's resources:
 * SELF (exact level match), ANCESTOR (resource is deeper than the policy),
 * DESCENDANT (policy is deeper than the resource), SELF_AND_ALL_DESCENDANTS
 * (remaining policy levels are all match-any), or NONE.
 */
@Override
public MatchType getMatchType(RangerAccessResource resource, Map<String, ResourceElementMatchingScope> scopes, Map<String, Object> evalContext) {
    // Fixed log format: "({}{})" rendered both arguments fused together; use "({}, {})".
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.getMatchType({}, {})", resource, evalContext);
    MatchType ret = MatchType.NONE;
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG, "RangerDefaultPolicyResourceMatcher.getMatchType()");
    }
    if (resource != null && policyResources != null) {
        int resourceKeysSize = resource.getKeys() == null ? 0 : resource.getKeys().size();
        if (policyResources.isEmpty() && resourceKeysSize == 0) {
            // both sides are empty: trivially a SELF match
            ret = MatchType.SELF;
        } else {
            List<RangerResourceDef> hierarchy = getMatchingHierarchy(resource);
            if (CollectionUtils.isNotEmpty(hierarchy)) {
                // Index of the last matcher that is not match-any; used below to
                // distinguish SELF_AND_ALL_DESCENDANTS from DESCENDANT when the
                // policy has more levels than the resource.
                int lastNonAnyMatcherIndex = -1;
                int matchersSize = 0;
                for (RangerResourceDef resourceDef : hierarchy) {
                    RangerResourceMatcher matcher = getResourceMatcher(resourceDef.getName());
                    if (matcher != null) {
                        if (!matcher.isMatchAny()) {
                            lastNonAnyMatcherIndex = matchersSize;
                        }
                        matchersSize++;
                    } else {
                        break;
                    }
                }
                if (resourceKeysSize == 0) {
                    ret = MatchType.SELF;
                }
                for (RangerResourceDef resourceDef : hierarchy) {
                    RangerResourceMatcher matcher = getResourceMatcher(resourceDef.getName());
                    Object resourceValue = resource.getValue(resourceDef.getName());
                    if (matcher != null) {
                        if (resourceValue != null || matcher.isMatchAny()) {
                            if (matcher.isMatch(resourceValue, scopes.get(resourceDef.getName()), evalContext)) {
                                ret = MatchType.SELF;
                            } else {
                                ret = MatchType.NONE;
                                break;
                            }
                        }
                    } else {
                        if (resourceValue != null) {
                            // More resource-values than matchers
                            ret = MatchType.ANCESTOR;
                        }
                        break;
                    }
                }
                if (ret == MatchType.SELF && resourceKeysSize < policyResources.size()) {
                    // More matchers than resource-values
                    if (resourceKeysSize > lastNonAnyMatcherIndex) {
                        // all remaining matchers which matched resource value of null are of type Any
                        ret = MatchType.SELF_AND_ALL_DESCENDANTS;
                    } else {
                        ret = MatchType.DESCENDANT;
                    }
                }
            }
        }
    }
    RangerPerfTracer.log(perf);
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.getMatchType({}, {}): {}", resource, evalContext, ret);
    return ret;
}
/**
 * Checks whether the access resource is an exact (grant/revoke style) match:
 * the resource's key-set must equal the policy's key-set and every value must
 * be a complete match per the level's matcher. Non-String resource values fail.
 */
@Override
public boolean isCompleteMatch(RangerAccessResource resource, Map<String, Object> evalContext) {
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.isCompleteMatch({}, {})", resource, evalContext);
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG, "RangerDefaultPolicyResourceMatcher.grantRevokeMatch()");
    }
    boolean ret = false;
    Collection<String> resourceKeys = resource == null ? null : resource.getKeys();
    Collection<String> policyKeys = policyResources == null ? null : policyResources.keySet();
    // Unlike isMatch(), a complete match requires key-set equality, not containment.
    boolean keysMatch = resourceKeys != null && policyKeys != null && CollectionUtils.isEqualCollection(resourceKeys, policyKeys);
    if (keysMatch) {
        for (RangerResourceDef resourceDef : serviceDef.getResources()) {
            String resourceName = resourceDef.getName();
            Object resourceValue = resource.getValue(resourceName);
            RangerResourceMatcher matcher = getResourceMatcher(resourceName);
            if (resourceValue == null) {
                ret = matcher == null || matcher.isCompleteMatch(null, evalContext);
            } else if (resourceValue instanceof String) {
                String strValue = (String) resourceValue;
                if (StringUtils.isEmpty(strValue)) {
                    ret = matcher == null || matcher.isCompleteMatch(strValue, evalContext);
                } else {
                    ret = matcher != null && matcher.isCompleteMatch(strValue, evalContext);
                }
            } else { // return false for any other type of resourceValue
                ret = false;
            }
            if (!ret) {
                break;
            }
        }
    } else {
        LOG.debug("isCompleteMatch(): keysMatch=false. resourceKeys={}; policyKeys={}", resourceKeys, policyKeys);
    }
    RangerPerfTracer.log(perf);
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.isCompleteMatch({}, {}): {}", resource, evalContext, ret);
    return ret;
}
/**
 * Checks whether a policy-resource map is an exact match of this policy's
 * resources: equal key-sets, and at every level the value collections are
 * either both empty or element-for-element equal.
 */
@Override
public boolean isCompleteMatch(Map<String, RangerPolicyResource> resources, Map<String, Object> evalContext) {
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.isCompleteMatch({}, {})", resources, evalContext);
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_MATCH_LOG, "RangerDefaultPolicyResourceMatcher.applyPolicyMatch()");
    }
    boolean ret = false;
    Collection<String> resourceKeys = resources == null ? null : resources.keySet();
    Collection<String> policyKeys = policyResources == null ? null : policyResources.keySet();
    boolean keysMatch = resourceKeys != null && policyKeys != null && CollectionUtils.isEqualCollection(resourceKeys, policyKeys);
    if (keysMatch) {
        for (RangerResourceDef resourceDef : serviceDef.getResources()) {
            String resourceName = resourceDef.getName();
            RangerPolicyResource resourceValues = resources.get(resourceName);
            RangerPolicyResource policyValues = policyResources == null ? null : policyResources.get(resourceName);
            if (resourceValues == null || CollectionUtils.isEmpty(resourceValues.getValues())) {
                // empty on the request side only matches empty on the policy side
                ret = (policyValues == null || CollectionUtils.isEmpty(policyValues.getValues()));
            } else if (policyValues != null && CollectionUtils.isNotEmpty(policyValues.getValues())) {
                ret = CollectionUtils.isEqualCollection(resourceValues.getValues(), policyValues.getValues());
            }
            if (!ret) {
                break;
            }
        }
    } else {
        LOG.debug("isCompleteMatch(): keysMatch=false. resourceKeys={}; policyKeys={}", resourceKeys, policyKeys);
    }
    RangerPerfTracer.log(perf);
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.isCompleteMatch({}, {}): {}", resources, evalContext, ret);
    return ret;
}
@Override
public boolean getNeedsDynamicEval() {
    // True when any matcher built by init() reported needing dynamic evaluation.
    return needsDynamicEval;
}
/** Renders initialization state plus one "{matcher} " entry per resource matcher. */
@Override
public StringBuilder toString(StringBuilder sb) {
    sb.append("RangerDefaultPolicyResourceMatcher={");
    sb.append("isInitialized=").append(isInitialized).append(", ");
    sb.append("matchers={");
    if (allMatchers != null) {
        allMatchers.values().forEach(matcher -> sb.append("{").append(matcher).append("} "));
    }
    sb.append("} ");
    sb.append("}");
    return sb;
}
/**
 * Builds a RangerResourceMatcher for every resource referenced by the policy.
 * Requires serviceDef and policyResources to be set. On success isInitialized
 * becomes true; on failure (except for audit policies) state is cleared and an
 * error is logged. Also records the unique valid hierarchy, if exactly one.
 */
@Override
public void init() {
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.init()");
    // reset all derived state before rebuilding
    allMatchers = null;
    needsDynamicEval = false;
    validResourceHierarchy = null;
    isInitialized = false;
    String errorText = "";
    RangerPerfTracer perf = null;
    if (RangerPerfTracer.isPerfTraceEnabled(PERF_POLICY_RESOURCE_MATCHER_INIT_LOG)) {
        perf = RangerPerfTracer.getPerfTracer(PERF_POLICY_RESOURCE_MATCHER_INIT_LOG, "RangerDefaultPolicyResourceMatcher.init()");
    }
    if (policyResources != null && !policyResources.isEmpty() && serviceDef != null) {
        serviceDefHelper = serviceDefHelper == null ? new RangerServiceDefHelper(serviceDef, false) : serviceDefHelper;
        Set<List<RangerResourceDef>> resourceHierarchies = serviceDefHelper.getResourceHierarchies(policyType, policyResources.keySet());
        int validHierarchiesCount = 0;
        for (List<RangerResourceDef> resourceHierarchy : resourceHierarchies) {
            if (isHierarchyValidForResources(resourceHierarchy, policyResources)) {
                validHierarchiesCount++;
                // cache the hierarchy only when it is unique; with multiple valid
                // hierarchies it must be recomputed per-request
                if (validHierarchiesCount == 1) {
                    validResourceHierarchy = resourceHierarchy;
                } else {
                    validResourceHierarchy = null;
                }
            } else {
                // BUG FIX: log the hierarchy actually being skipped (was logging the whole set)
                LOG.warn("RangerDefaultPolicyResourceMatcher.init(): gaps found in policyResources, skipping hierarchy:[{}]", resourceHierarchy);
            }
        }
        if (validHierarchiesCount > 0) {
            allMatchers = new HashMap<>();
            for (List<RangerResourceDef> resourceHierarchy : resourceHierarchies) {
                for (RangerResourceDef resourceDef : resourceHierarchy) {
                    String resourceName = resourceDef.getName();
                    if (allMatchers.containsKey(resourceName)) {
                        continue;
                    }
                    RangerPolicyResource policyResource = policyResources.get(resourceName);
                    if (policyResource == null) {
                        LOG.debug("RangerDefaultPolicyResourceMatcher.init(): no matcher created for {}. Continuing ...", resourceName);
                        continue;
                    }
                    RangerResourceMatcher matcher = createResourceMatcher(resourceDef, policyResource);
                    if (matcher != null) {
                        if (!needsDynamicEval && matcher.getNeedsDynamicEval()) {
                            needsDynamicEval = true;
                        }
                        allMatchers.put(resourceName, matcher);
                    } else {
                        LOG.error("RangerDefaultPolicyResourceMatcher.init(): failed to find matcher for resource {}", resourceName);
                        allMatchers = null;
                        errorText = "no matcher found for resource " + resourceName;
                        break;
                    }
                }
                if (allMatchers == null) {
                    break;
                }
            }
        } else {
            errorText = "policyResources elements are not part of any valid resourcedef hierarchy.";
        }
    } else {
        errorText = "policyResources is null or empty, or serviceDef is null.";
    }
    if (allMatchers == null && policyType != RangerPolicy.POLICY_TYPE_AUDIT) {
        serviceDefHelper = null;
        validResourceHierarchy = null;
        Set<String> policyResourceKeys = policyResources == null ? null : policyResources.keySet();
        String serviceDefName = serviceDef == null ? "" : serviceDef.getName();
        StringBuilder keysString = new StringBuilder();
        if (CollectionUtils.isNotEmpty(policyResourceKeys)) {
            for (String policyResourceKeyName : policyResourceKeys) {
                keysString.append(policyResourceKeyName).append(" ");
            }
        }
        // BUG FIX: message was missing the closing ')' after policyResourceKeys
        LOG.error("RangerDefaultPolicyResourceMatcher.init() failed: {} (serviceDef={}, policyResourceKeys={})", errorText, serviceDefName, keysString);
    } else {
        isInitialized = true;
    }
    RangerPerfTracer.log(perf);
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.init(): ret={}", isInitialized);
}
/**
 * Takes resources and policy-type from the given policy; a null policy resets
 * to empty resources with the default (access) policy type.
 */
@Override
public void setPolicy(RangerPolicy policy) {
    if (isInitialized) {
        LOG.warn("RangerDefaultPolicyResourceMatcher is already initialized. init() must be done again after updating policy");
    }
    Map<String, RangerPolicyResource> resources = policy == null ? null : policy.getResources();
    Integer type = policy == null ? null : policy.getPolicyType();
    setPolicyResources(resources, type == null ? RangerPolicy.POLICY_TYPE_ACCESS : type);
}
/** Sets the resources assuming the default (access) policy type. */
@Override
public void setPolicyResources(Map<String, RangerPolicyResource> policyResources) {
    if (isInitialized) {
        LOG.warn("RangerDefaultPolicyResourceMatcher is already initialized. init() must be done again after updating policy-resources");
    }
    setPolicyResources(policyResources, RangerPolicy.POLICY_TYPE_ACCESS);
}
@Override
public void setPolicyResources(Map<String, RangerPolicyResource> policyResources, int policyType) {
    // No re-init warning here; callers come through setPolicy()/setPolicyResources(Map).
    this.policyResources = policyResources;
    this.policyType = policyType;
}
/** Delegates to the StringBuilder variant so both renderings stay in sync. */
@Override
public String toString() {
    return toString(new StringBuilder()).toString();
}
/**
 * Among all resource-def hierarchies (for this policy-type) that contain the
 * given keys, returns the shortest; a hierarchy whose length equals the
 * key-count cannot be beaten, so the scan stops early when one is found.
 * Returns null when keys are empty or no helper is available.
 */
private List<RangerResourceDef> getMatchingHierarchy(Set<String> resourceKeys) {
    List<RangerResourceDef> shortest = null;
    if (CollectionUtils.isNotEmpty(resourceKeys) && serviceDefHelper != null) {
        for (List<RangerResourceDef> candidate : serviceDefHelper.getResourceHierarchies(policyType, resourceKeys)) {
            if (shortest == null) {
                shortest = candidate;
            } else if (candidate.size() < shortest.size()) {
                shortest = candidate;
                if (shortest.size() == resourceKeys.size()) {
                    break;
                }
            }
        }
    }
    return shortest;
}
/**
 * Picks the resource-def hierarchy to evaluate the given access resource
 * against. Prefers the unique hierarchy cached by init(); otherwise derives one
 * from whichever key-set (policy's or resource's) contains the other. Returns
 * null when the chosen hierarchy has gaps with respect to the resource.
 */
private List<RangerResourceDef> getMatchingHierarchy(RangerAccessResource resource) {
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.getMatchingHierarchy({})", resource);
    final List<RangerResourceDef> ret;
    Set<String> policyResourcesKeySet = policyResources.keySet();
    Set<String> resourceKeySet = resource.getKeys();
    if (CollectionUtils.isNotEmpty(resourceKeySet)) {
        List<RangerResourceDef> aValidHierarchy = null;
        if (validResourceHierarchy != null && serviceDefHelper != null) {
            // fast path: init() found exactly one valid hierarchy
            if (serviceDefHelper.hierarchyHasAllResources(validResourceHierarchy, resourceKeySet)) {
                aValidHierarchy = validResourceHierarchy;
            }
        } else {
            if (policyResourcesKeySet.containsAll(resourceKeySet)) {
                aValidHierarchy = getMatchingHierarchy(policyResourcesKeySet);
            } else if (resourceKeySet.containsAll(policyResourcesKeySet)) {
                aValidHierarchy = getMatchingHierarchy(resourceKeySet);
            }
        }
        ret = isHierarchyValidForResources(aValidHierarchy, resource.getAsMap()) ? aValidHierarchy : null;
    } else {
        // empty resource: fall back to the policy's own hierarchy
        ret = validResourceHierarchy != null ? validResourceHierarchy : getMatchingHierarchy(policyResourcesKeySet);
    }
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.getMatchingHierarchy({}): {}", resource, ret);
    return ret;
}
/**
 * Translates a computed match-type into a yes/no answer for the requested
 * scope. SELF_AND_ALL_DESCENDANTS satisfies every scope that accepts SELF; any
 * other scope value accepts any non-NONE relation.
 */
private boolean isMatch(final MatchScope scope, final MatchType matchType) {
    final boolean isSelf = matchType == MatchType.SELF || matchType == MatchType.SELF_AND_ALL_DESCENDANTS;
    final boolean ret;
    if (scope == MatchScope.SELF) {
        ret = isSelf;
    } else if (scope == MatchScope.ANCESTOR) {
        ret = matchType == MatchType.ANCESTOR;
    } else if (scope == MatchScope.DESCENDANT) {
        ret = matchType == MatchType.DESCENDANT;
    } else if (scope == MatchScope.SELF_OR_ANCESTOR) {
        ret = isSelf || matchType == MatchType.ANCESTOR;
    } else if (scope == MatchScope.SELF_OR_DESCENDANT) {
        ret = isSelf || matchType == MatchType.DESCENDANT;
    } else {
        ret = matchType != MatchType.NONE;
    }
    return ret;
}
/**
 * Creates (or fetches from the pluginContext cache) the RangerResourceMatcher
 * for one resource level. Falls back to RangerDefaultResourceMatcher when the
 * configured matcher class cannot be instantiated; returns null only when
 * resourceDef itself is null.
 */
private RangerResourceMatcher createResourceMatcher(RangerResourceDef resourceDef, RangerPolicyResource resource) {
    LOG.debug("==> RangerDefaultPolicyResourceMatcher.createResourceMatcher({}, {})", resourceDef, resource);
    RangerResourceMatcher ret = null;
    if (resourceDef != null) {
        String resName = resourceDef.getName();
        String clsName = resourceDef.getMatcher();
        // the pluginContext may cache matchers across policies; try it first
        if (pluginContext != null) {
            ret = pluginContext.getResourceMatcher(resName, resource);
        }
        if (ret == null) {
            if (!StringUtils.isEmpty(clsName)) {
                try {
                    @SuppressWarnings("unchecked") Class<RangerResourceMatcher> matcherClass = (Class<RangerResourceMatcher>) Class.forName(clsName);
                    // Class.newInstance() is deprecated since Java 9; the
                    // Constructor-based form is the supported replacement.
                    // Any reflective failure is caught below and logged.
                    ret = matcherClass.getDeclaredConstructor().newInstance();
                } catch (Exception excp) {
                    LOG.error("failed to instantiate resource matcher '{}' for '{}'. Default resource matcher will be used", clsName, resName, excp);
                }
            }
            if (ret == null) {
                ret = new RangerDefaultResourceMatcher();
            }
            // optionally swap in a wildcard-enabled resource-def (see constructor flag)
            if (forceEnableWildcardMatch && !Boolean.parseBoolean(resourceDef.getMatcherOptions().get(OPTION_WILD_CARD))) {
                resourceDef = serviceDefHelper.getWildcardEnabledResourceDef(resourceDef.getName(), policyType);
            }
            ret.setResourceDef(resourceDef);
            ret.setPolicyResource(resource);
            ret.init();
            if (pluginContext != null) {
                pluginContext.setResourceMatcher(resName, resource, ret);
            }
        } else {
            LOG.debug("Did not create a fresh matcher - used matcher from pluginContext");
        }
    } else {
        LOG.error("RangerDefaultPolicyResourceMatcher: RangerResourceDef is null");
    }
    LOG.debug("<== RangerDefaultPolicyResourceMatcher.createResourceMatcher({}, {}): {}", resourceDef, resource, ret);
    return ret;
}
}
|
apache/solr | 35,465 | solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import java.io.IOException;
import java.io.Reader;
import java.io.UncheckedIOException;
import java.lang.invoke.MethodHandles;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.function.Supplier;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.InvertableType;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StoredValue;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.StoredFields;
import org.apache.lucene.misc.document.LazyDocument;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentBase;
import org.apache.solr.common.SolrException;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.response.DocsStreamer;
import org.apache.solr.response.ResultContext;
import org.apache.solr.schema.BoolField;
import org.apache.solr.schema.EnumFieldType;
import org.apache.solr.schema.LatLonPointSpatialField;
import org.apache.solr.schema.NumberType;
import org.apache.solr.schema.SchemaField;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A helper class of {@link org.apache.solr.search.SolrIndexSearcher} for stored Document related
* matters including DocValue substitutions.
*/
public class SolrDocumentFetcher {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrIndexSearcher searcher;
private final int nLeaves;
private final boolean enableLazyFieldLoading;
private final SolrCache<Integer, Document> documentCache;
private final Set<String> allStored;
private final Set<String> dvsCanSubstituteStored;
/** Contains the names/patterns of all docValues=true,stored=false fields in the schema. */
private final Set<String> allNonStoredDVs;
/**
* Contains the names/patterns of all docValues=true,stored=false,useDocValuesAsStored=true fields
* in the schema.
*/
private final Set<String> nonStoredDVsUsedAsStored;
/**
* Contains the names/patterns of all docValues=true,stored=false fields, excluding those that are
* copyField targets in the schema.
*/
private final Set<String> nonStoredDVsWithoutCopyTargets;
private static int largeValueLengthCacheThreshold =
Integer.getInteger("solr.largeField.cacheThreshold", 512 * 1024); // internal setting
private final Set<String> largeFields;
private final Collection<String>[] storedHighlightFieldNames; // lazy populated; use getter
private final Collection<String>[] indexedFieldNames; // lazy populated; use getter
private final StoredFields storedFields;
/**
 * Copy constructor: shares all configuration/caches of the template fetcher but
 * uses the supplied StoredFields instance.
 */
private SolrDocumentFetcher(SolrDocumentFetcher template, StoredFields storedFields) {
    this.searcher = template.searcher;
    this.nLeaves = template.nLeaves;
    this.enableLazyFieldLoading = template.enableLazyFieldLoading;
    this.documentCache = template.documentCache;
    this.nonStoredDVsUsedAsStored = template.nonStoredDVsUsedAsStored;
    this.allNonStoredDVs = template.allNonStoredDVs;
    this.nonStoredDVsWithoutCopyTargets = template.nonStoredDVsWithoutCopyTargets;
    this.largeFields = template.largeFields;
    this.dvsCanSubstituteStored = template.dvsCanSubstituteStored;
    this.allStored = template.allStored;
    // BUG FIX: was copying template.indexedFieldNames, which would make
    // getStoredHighlightFieldNames() on a clone return the indexed-field cache.
    this.storedHighlightFieldNames = template.storedHighlightFieldNames;
    this.indexedFieldNames = template.indexedFieldNames;
    this.storedFields = storedFields;
}
/**
 * Returns a copy of this fetcher that shares all cached configuration but holds
 * a freshly obtained StoredFields instance from the index reader.
 */
@Override
protected SolrDocumentFetcher clone() {
    try {
        StoredFields freshStoredFields = searcher.getIndexReader().storedFields();
        return new SolrDocumentFetcher(this, freshStoredFields);
    } catch (IOException ioe) {
        throw new UncheckedIOException(ioe);
    }
}
/**
 * Builds the template fetcher for a searcher: classifies every field known to
 * the index (via FieldInfos, so materialized dynamic fields are included) into
 * the various stored/docValues sets, and optionally creates the document cache.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
SolrDocumentFetcher(SolrIndexSearcher searcher, SolrConfig solrConfig, boolean cachingEnabled) {
    this.searcher = searcher;
    this.nLeaves = searcher.getTopReaderContext().leaves().size();
    if (cachingEnabled) {
        documentCache =
            solrConfig.documentCacheConfig == null
                ? null
                : solrConfig.documentCacheConfig.newInstance();
    } else {
        documentCache = null;
    }
    // lazy loading makes no sense if we don't have a `documentCache`
    this.enableLazyFieldLoading = solrConfig.enableLazyFieldLoading && documentCache != null;
    final Set<String> nonStoredDVsUsedAsStored = new HashSet<>();
    final Set<String> allNonStoredDVs = new HashSet<>();
    final Set<String> nonStoredDVsWithoutCopyTargets = new HashSet<>();
    final Set<String> storedLargeFields = new HashSet<>();
    final Set<String> dvsCanSubstituteStored = new HashSet<>();
    final Set<String> allStoreds = new HashSet<>();
    // can find materialized dynamic fields, unlike using the Solr IndexSchema.
    for (FieldInfo fieldInfo : searcher.getFieldInfos()) {
        final SchemaField schemaField = searcher.getSchema().getFieldOrNull(fieldInfo.name);
        if (schemaField == null) {
            // field exists in the index but not in the schema; nothing to classify
            continue;
        }
        if (canSubstituteDvForStored(fieldInfo, schemaField)) {
            dvsCanSubstituteStored.add(fieldInfo.name);
        }
        if (schemaField.stored()) {
            allStoreds.add(fieldInfo.name);
        }
        if (!schemaField.stored() && schemaField.hasDocValues()) {
            if (schemaField.useDocValuesAsStored()) {
                nonStoredDVsUsedAsStored.add(fieldInfo.name);
            }
            allNonStoredDVs.add(fieldInfo.name);
            if (!searcher.getSchema().isCopyFieldTarget(schemaField)) {
                nonStoredDVsWithoutCopyTargets.add(fieldInfo.name);
            }
        }
        if (schemaField.stored() && schemaField.isLarge()) {
            storedLargeFields.add(schemaField.getName());
        }
    }
    // publish the classification sets as unmodifiable views
    this.nonStoredDVsUsedAsStored = Collections.unmodifiableSet(nonStoredDVsUsedAsStored);
    this.allNonStoredDVs = Collections.unmodifiableSet(allNonStoredDVs);
    this.nonStoredDVsWithoutCopyTargets =
        Collections.unmodifiableSet(nonStoredDVsWithoutCopyTargets);
    this.largeFields = Collections.unmodifiableSet(storedLargeFields);
    this.dvsCanSubstituteStored = Collections.unmodifiableSet(dvsCanSubstituteStored);
    this.allStored = Collections.unmodifiableSet(allStoreds);
    this.storedFields = null; // template docFetcher should throw NPE if used directly
    this.storedHighlightFieldNames = new Collection[1];
    this.indexedFieldNames = new Collection[1];
}
// Does this field have both stored=true and docValues=true and is otherwise
// eligible for getting the field's value from DV?
private boolean canSubstituteDvForStored(FieldInfo fieldInfo, SchemaField schemaField) {
    // Eligible only for single-valued fields that are both stored and have docValues.
    boolean eligible = schemaField.hasDocValues() && schemaField.stored() && !schemaField.multiValued();
    if (!eligible) {
        return false;
    }
    // A numeric DV cannot be decoded without knowing the schema's NumberType.
    DocValuesType dvType = fieldInfo.getDocValuesType();
    boolean isNumericDv = dvType == DocValuesType.NUMERIC || dvType == DocValuesType.SORTED_NUMERIC;
    return !(isNumericDv && schemaField.getType().getNumberType() == null);
}
/** True when lazy field loading is configured AND a document cache exists. */
public boolean isLazyFieldLoadingEnabled() {
    return enableLazyFieldLoading;
}
/** Returns the document cache (maps lucene docId to {@link Document}), or null if none is configured. */
public SolrCache<Integer, Document> getDocumentCache() {
return documentCache;
}
/**
 * Returns a collection of the names of all stored fields which can be highlighted the index
 * reader knows about. The result is computed once, lazily, and cached for subsequent calls.
 */
public Collection<String> getStoredHighlightFieldNames() {
  synchronized (storedHighlightFieldNames) {
    if (storedHighlightFieldNames[0] == null) {
      final Collection<String> highlightable = new ArrayList<>();
      for (FieldInfo fieldInfo : searcher.getFieldInfos()) {
        final String fieldName = fieldInfo.name;
        try {
          final SchemaField schemaField = searcher.getSchema().getField(fieldName);
          // only stored text-like fields (TextField / StrField) can be highlighted
          final boolean textual =
              (schemaField.getType() instanceof org.apache.solr.schema.TextField)
                  || (schemaField.getType() instanceof org.apache.solr.schema.StrField);
          if (schemaField.stored() && textual) {
            highlightable.add(fieldName);
          }
        } catch (RuntimeException e) {
          // getField() throws a SolrException, but it arrives as a RuntimeException
          log.warn("Field [{}] found in index, but not defined in schema.", fieldName);
        }
      }
      storedHighlightFieldNames[0] = highlightable;
    }
    return storedHighlightFieldNames[0];
  }
}
/** Returns a collection of the names of all indexed fields which the index reader knows about. */
public Collection<String> getIndexedFieldNames() {
  synchronized (indexedFieldNames) {
    if (indexedFieldNames[0] == null) {
      // lazily computed once, then cached for the lifetime of this fetcher
      final Collection<String> indexed = new ArrayList<>();
      for (FieldInfo fieldInfo : searcher.getFieldInfos()) {
        final boolean isIndexed = fieldInfo.getIndexOptions() != IndexOptions.NONE;
        if (isIndexed) {
          indexed.add(fieldInfo.name);
        }
      }
      indexedFieldNames[0] = indexed;
    }
    return indexedFieldNames[0];
  }
}
/** Retrieves the {@link Document} for the given doc id with all stored fields loaded. */
public Document doc(int docId) throws IOException {
return doc(docId, (Set<String>) null);
}
/**
 * Retrieve the {@link Document} instance corresponding to the document id.
 *
 * <p><b>NOTE</b>: the document will have all fields accessible, but if a field filter is
 * provided, only the provided fields will be loaded (the remainder will be available lazily).
 */
public Document doc(int i, Set<String> fields) throws IOException {
  if (documentCache == null) {
    return docNC(i, fields);
  }
  // with lazy loading enabled we cache a doc restricted to the requested fields (the rest stay
  // lazily loadable); otherwise cache the fully loaded document
  final Set<String> fieldsToCache = enableLazyFieldLoading ? fields : null;
  final Document cached = documentCache.computeIfAbsent(i, docId -> docNC(docId, fieldsToCache));
  if (cached != null) {
    return cached;
  }
  // failed to retrieve due to an earlier exception, try again?
  return docNC(i, fields);
}
/**
 * Loads the document without consulting the document cache ("NC" = no cache). When a field
 * filter is provided, only those stored fields are loaded eagerly by the visitor.
 */
private Document docNC(int i, Set<String> fields) throws IOException {
final SolrDocumentStoredFieldVisitor visitor =
new SolrDocumentStoredFieldVisitor(fields, searcher.getIndexReader(), i);
// NOTE(review): storedFields is set to null on the "template" fetcher instance (see
// constructor), which deliberately NPEs here if that instance is used directly.
storedFields.document(i, visitor);
return visitor.getDocument();
}
/**
 * Optimized population of a {@link SolrDocument}:
 *
 * <p>1. fetches all fields from docValues if possible — avoiding a stored-data disk seek and
 * decompression cycle. Used only when every requested field is {code docValues=true stored=false
 * multiValued=false} (multiValued docValues do not faithfully reflect input order in all cases).
 *
 * <p>2. otherwise, fetches all requested fields it can from the stored values — once the stored
 * data must be decompressed anyway, it's cheapest to take everything available from it.
 *
 * <p>3. finally adds the fields with {code docValues=true stored=false} that step 2 could not
 * supply.
 *
 * @param luceneDocId The Lucene doc ID
 * @param solrReturnFields the structure holding the fields to be returned. The first time this
 *     method is called for a particular document list, it will be modified by adding a
 *     RetrieveFieldsOptimizer for use in future calls.
 * @return The SolrDocument with values requested.
 *     <p>Just call it, e.g. {code SolrDocument sdoc = docFetcher.solrDoc(id, solrReturnFields);}
 *     then process the resulting SolrDocument as usual. Subsequent calls with the same
 *     solrReturnFields re-use the optimizer created the first time.
 *     <p>NOTE: DO NOT re-use the same SolrReturnFields object if the fields requested change.
 */
public SolrDocument solrDoc(int luceneDocId, SolrReturnFields solrReturnFields) {
  return solrReturnFields
      .getFetchOptimizer(() -> new RetrieveFieldsOptimizer(solrReturnFields))
      .getSolrDoc(luceneDocId);
}
/**
 * {@link StoredFieldVisitor} which loads the specified fields eagerly (or all if null). If {@link
 * #enableLazyFieldLoading} then the rest get special lazy field entries. Designated "large"
 * fields will always get a special field entry.
 */
private class SolrDocumentStoredFieldVisitor extends DocumentStoredFieldVisitor {
// the document being built; obtained from the superclass via getDocument()
private final Document doc;
private final LazyDocument
lazyFieldProducer; // arguably a better name than LazyDocument; at least how we use it here
private final int docId;
// whether designated "large" fields should be deferred behind LargeLazyField entries
private final boolean addLargeFieldsLazily;
SolrDocumentStoredFieldVisitor(Set<String> toLoad, IndexReader reader, int docId) {
super(toLoad);
this.docId = docId;
this.doc = getDocument();
// lazy loading only applies when a subset of fields was requested (toLoad != null)
this.lazyFieldProducer =
toLoad != null && enableLazyFieldLoading ? new LazyDocument(reader, docId) : null;
// deferring large fields is only worthwhile when cached documents can be shared
this.addLargeFieldsLazily = (documentCache != null && !largeFields.isEmpty());
// TODO can we return Status.STOP after a val is loaded and we know there are no other fields
// of interest?
// When: toLoad is one single-valued field, no lazyFieldProducer
}
@Override
public void stringField(FieldInfo fieldInfo, String value) throws IOException {
// callers may ask (via ResultContext.READASBYTES) for certain string fields to be kept raw
Predicate<String> readAsBytes = ResultContext.READASBYTES.get();
if (readAsBytes != null && readAsBytes.test(fieldInfo.name)) {
// rebuild a FieldType mirroring the index-time settings for this field
final FieldType ft = new FieldType(TextField.TYPE_STORED);
ft.setStoreTermVectors(fieldInfo.hasTermVectors());
ft.setOmitNorms(fieldInfo.omitsNorms());
ft.setIndexOptions(fieldInfo.getIndexOptions());
Objects.requireNonNull(value, "String value should not be null");
doc.add(new StoredField(fieldInfo.name, value, ft));
} else {
super.stringField(fieldInfo, value);
}
}
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
Status status = super.needsField(fieldInfo);
assert status != Status.STOP : "Status.STOP not supported or expected";
// load "large" fields using this lazy mechanism
if (addLargeFieldsLazily && largeFields.contains(fieldInfo.name)) {
if (lazyFieldProducer != null || status == Status.YES) {
doc.add(new LargeLazyField(fieldInfo.name, docId));
}
// a lazy entry was substituted (or the field wasn't wanted); never load the value eagerly
return Status.NO;
}
if (status == Status.NO && lazyFieldProducer != null) { // lazy
doc.add(lazyFieldProducer.getField(fieldInfo));
}
return status;
}
}
/** Visit a document's fields using a {@link StoredFieldVisitor}. */
public void doc(int docId, StoredFieldVisitor visitor) throws IOException {
  if (documentCache == null) {
    storedFields.document(docId, visitor);
    return;
  }
  // get cached document or retrieve it including all fields (and cache it)
  visitFromCached(doc(docId), visitor);
}
/** Executes a stored field visitor against a hit from the document cache */
private void visitFromCached(Document document, StoredFieldVisitor visitor) throws IOException {
for (IndexableField f : document) {
final FieldInfo info = searcher.getFieldInfos().fieldInfo(f.name());
final StoredFieldVisitor.Status needsField = visitor.needsField(info);
if (needsField == StoredFieldVisitor.Status.STOP) return;
if (needsField == StoredFieldVisitor.Status.NO) continue;
// dispatch on the field's value representation: binary first, then numeric, then string
BytesRef binaryValue = f.binaryValue();
if (binaryValue != null) {
visitor.binaryField(info, toByteArrayUnwrapIfPossible(binaryValue));
continue;
}
Number numericValue = f.numericValue();
if (numericValue != null) {
if (numericValue instanceof Double) {
visitor.doubleField(info, numericValue.doubleValue());
} else if (numericValue instanceof Integer) {
visitor.intField(info, numericValue.intValue());
} else if (numericValue instanceof Float) {
visitor.floatField(info, numericValue.floatValue());
} else if (numericValue instanceof Long) {
visitor.longField(info, numericValue.longValue());
} else {
// only the four boxed types above are expected in a cached document
throw new AssertionError();
}
continue;
}
// must be String
if (f instanceof LargeLazyField) { // optimization to avoid premature string conversion
visitor.stringField(info, toStringUnwrapIfPossible(((LargeLazyField) f).readBytes()));
} else {
visitor.stringField(info, f.stringValue());
}
}
}
/**
 * Returns the {@link BytesRef}'s backing array directly when the ref spans it exactly
 * (offset 0, full length); otherwise copies out just the referenced slice.
 */
private byte[] toByteArrayUnwrapIfPossible(BytesRef bytesRef) {
  final boolean spansWholeArray =
      bytesRef.offset == 0 && bytesRef.bytes.length == bytesRef.length;
  if (spansWholeArray) {
    return bytesRef.bytes;
  }
  return Arrays.copyOfRange(bytesRef.bytes, bytesRef.offset, bytesRef.offset + bytesRef.length);
}
/**
 * Decodes the bytes referenced by the {@link BytesRef} as a UTF-8 String, skipping the
 * offset/length arithmetic when the ref spans its backing array exactly.
 */
private String toStringUnwrapIfPossible(BytesRef bytesRef) {
  if (bytesRef.offset == 0 && bytesRef.bytes.length == bytesRef.length) {
    return new String(bytesRef.bytes, StandardCharsets.UTF_8);
  } else {
    // BUGFIX: String(byte[], int, int, Charset) takes a LENGTH as its 3rd argument, not an
    // end offset. The previous code passed bytesRef.offset + bytesRef.length, which decoded
    // bytes past the slice (or threw IndexOutOfBoundsException) whenever offset > 0.
    return new String(bytesRef.bytes, bytesRef.offset, bytesRef.length, StandardCharsets.UTF_8);
  }
}
/**
 * Unlike LazyDocument.LazyField, we (a) don't cache large values, and (b) provide access to the
 * byte[].
 */
class LargeLazyField implements IndexableField {
final String name;
final int docId;
// synchronize on 'this' to access:
BytesRef cachedBytes; // only populated when the value is small enough (below largeValueLengthCacheThreshold)
private LargeLazyField(String name, int docId) {
this.name = name;
this.docId = docId;
}
@Override
public String toString() {
return fieldType().toString() + "<" + name() + ">"; // mimic Field.java
}
@Override
public String name() {
return name;
}
@Override
public IndexableFieldType fieldType() {
// the SchemaField doubles as the IndexableFieldType
return searcher.getSchema().getField(name());
}
@Override
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
// or we could throw unsupported exception?
return analyzer.tokenStream(name(), stringValue());
}
/** (for tests) */
synchronized boolean hasBeenLoaded() {
return cachedBytes != null;
}
@Override
public synchronized String stringValue() {
try {
return readBytes().utf8ToString();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// Reads the field's bytes from the stored-fields reader, returning the cached copy when one
// was retained by an earlier call.
synchronized BytesRef readBytes() throws IOException {
if (cachedBytes != null) {
return cachedBytes;
} else {
BytesRef bytesRef = new BytesRef();
storedFields.document(
docId,
new StoredFieldVisitor() {
boolean done = false;
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
if (done) {
// the value was captured; stop scanning the remaining stored fields
return Status.STOP;
}
return fieldInfo.name.equals(name()) ? Status.YES : Status.NO;
}
@Override
public void stringField(FieldInfo fieldInfo, String value) throws IOException {
Objects.requireNonNull(value, "String value should not be null");
bytesRef.bytes = value.getBytes(StandardCharsets.UTF_8);
bytesRef.length = bytesRef.bytes.length;
done = true;
}
@Override
public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException {
throw new UnsupportedOperationException(
"'large' binary fields are not (yet) supported");
}
});
// retain the value only when it is small; truly large values are re-read on demand
if (bytesRef.length < largeValueLengthCacheThreshold) {
return cachedBytes = bytesRef;
} else {
return bytesRef;
}
}
}
@Override
public BytesRef binaryValue() {
return null;
}
@Override
public Reader readerValue() {
return null;
}
@Override
public Number numericValue() {
return null;
}
@Override
public StoredValue storedValue() {
// materializes the string value; may trigger a read via readBytes()
return new StoredValue(stringValue());
}
@Override
public InvertableType invertableType() {
return null;
}
}
/**
 * This will fetch and add the docValues fields to a given SolrDocument/SolrInputDocument
 *
 * @param doc A SolrDocument or SolrInputDocument instance where docValues will be added
 * @param docid The lucene docid of the document to be populated
 * @param fields The fields with docValues to populate the document with. DocValues fields which
 *     do not exist or not decodable will be ignored.
 */
public void decorateDocValueFields(
    SolrDocumentBase<?, ?> doc,
    int docid,
    Set<String> fields,
    DocValuesIteratorCache reuseDvIters)
    throws IOException {
  // translate the global docid into a leaf reader plus leaf-local id
  final List<LeafReaderContext> leaves = searcher.getLeafContexts();
  final int leafOrd = ReaderUtil.subIndex(docid, leaves);
  final LeafReaderContext leafContext = leaves.get(leafOrd);
  final int localId = docid - leafContext.docBase;
  final LeafReader leafReader = leafContext.reader();
  for (String fieldName : fields) {
    final DocValuesIteratorCache.FieldDocValuesSupplier supplier =
        reuseDvIters.getSupplier(fieldName);
    if (supplier == null) {
      continue; // field has no usable docValues; skip silently per contract
    }
    final Object fieldValue = decodeDVField(localId, leafReader, leafOrd, supplier);
    if (fieldValue != null) {
      doc.setField(fieldName, fieldValue);
    }
  }
}
/**
 * Decode the value of a docValues field for a document.
 *
 * @return null if the DV field does not exist or cannot be decoded
 */
private Object decodeDVField(
int localId,
LeafReader leafReader,
int readerOrd,
DocValuesIteratorCache.FieldDocValuesSupplier e)
throws IOException {
final DocValuesType dvType = e.type;
switch (dvType) {
case NUMERIC:
final NumericDocValues ndv = e.getNumericDocValues(localId, leafReader, readerOrd);
if (ndv == null) {
return null;
}
long val = ndv.longValue();
return decodeNumberFromDV(e.schemaField, val, false);
case BINARY:
BinaryDocValues bdv = e.getBinaryDocValues(localId, leafReader, readerOrd);
if (bdv != null) {
// deep-copy: the BytesRef returned by the iterator is reused internally
return BytesRef.deepCopyOf(bdv.binaryValue()).bytes;
}
return null;
case SORTED:
SortedDocValues sdv = e.getSortedDocValues(localId, leafReader, readerOrd);
if (sdv != null) {
final BytesRef bRef = sdv.lookupOrd(sdv.ordValue());
// Special handling for Boolean fields since they're stored as 'T' and 'F'.
if (e.schemaField.getType() instanceof BoolField) {
return e.schemaField.getType().toObject(e.schemaField, bRef);
} else {
return bRef.utf8ToString();
}
}
return null;
case SORTED_NUMERIC:
final SortedNumericDocValues numericDv =
e.getSortedNumericDocValues(localId, leafReader, readerOrd);
if (numericDv != null) {
final int docValueCount = numericDv.docValueCount();
final List<Object> outValues = new ArrayList<>(docValueCount);
for (int i = 0; i < docValueCount; i++) {
long number = numericDv.nextValue();
Object value = decodeNumberFromDV(e.schemaField, number, true);
// return immediately if the number is not decodable, hence won't return an empty list.
if (value == null) {
return null;
}
// normally never true but LatLonPointSpatialField uses SORTED_NUMERIC even when single
// valued
else if (e.schemaField.multiValued() == false) {
return value;
} else {
outValues.add(value);
}
}
assert outValues.size() > 0;
return outValues;
}
return null;
case SORTED_SET:
final SortedSetDocValues values = e.getSortedSetDocValues(localId, leafReader, readerOrd);
if (values != null) {
final List<Object> outValues = new ArrayList<>();
for (int o = 0; o < values.docValueCount(); o++) {
long ord = values.nextOrd();
BytesRef value = values.lookupOrd(ord);
outValues.add(e.schemaField.getType().toObject(e.schemaField, value));
}
assert outValues.size() > 0;
return outValues;
}
return null;
default:
// NONE (and any future type) is unexpected for a field that has a DV supplier
throw new IllegalStateException();
}
}
/**
 * Decode a numeric docValues raw long into the field's logical value.
 *
 * @param sortableNumeric true when the value comes from a SORTED_NUMERIC iterator, whose
 *     float/double encoding differs from plain NUMERIC (sortable bits vs raw IEEE bits)
 * @return the decoded value, or null when the field's numberType is unknown
 */
private Object decodeNumberFromDV(SchemaField schemaField, long value, boolean sortableNumeric) {
// note: This special-case is unfortunate; if we have to add any more than perhaps the fieldType
// should have this method so that specific field types can customize it.
if (schemaField.getType() instanceof LatLonPointSpatialField) {
return LatLonPointSpatialField.decodeDocValueToString(value);
}
if (schemaField.getType().getNumberType() == null) {
log.warn(
"Couldn't decode docValues for field: [{}], schemaField: [{}], numberType is unknown",
schemaField.getName(),
schemaField);
return null;
}
switch (schemaField.getType().getNumberType()) {
case INTEGER:
final int raw = (int) value;
if (schemaField.getType() instanceof EnumFieldType) {
// enums store the ordinal int; map it back to the symbolic string value
return ((EnumFieldType) schemaField.getType())
.getEnumMapping()
.intValueToStringValue(raw);
} else {
return raw;
}
case LONG:
return value;
case FLOAT:
if (sortableNumeric) {
return NumericUtils.sortableIntToFloat((int) value);
} else {
return Float.intBitsToFloat((int) value);
}
case DOUBLE:
if (sortableNumeric) {
return NumericUtils.sortableLongToDouble(value);
} else {
return Double.longBitsToDouble(value);
}
case DATE:
// dates are stored as epoch millis
return new Date(value);
default:
// all possible values are covered above; this line will never be reached
throw new AssertionError();
}
}
/** Names of fields whose stored value may be served from docValues instead (see canSubstituteDvForStored). */
public Set<String> getDvsCanSubstituteStored() {
return dvsCanSubstituteStored;
}
/** Names of all stored fields known to the index reader. */
public Set<String> getAllStored() {
return allStored;
}
/**
 * Returns an unmodifiable set of non-stored docValues field names.
 *
 * @param onlyUseDocValuesAsStored If false, returns all non-stored docValues. If true, returns
 *     only those non-stored docValues which have the {@link SchemaField#useDocValuesAsStored()}
 *     flag true.
 */
public Set<String> getNonStoredDVs(boolean onlyUseDocValuesAsStored) {
  if (onlyUseDocValuesAsStored) {
    return nonStoredDVsUsedAsStored;
  }
  return allNonStoredDVs;
}
/**
 * Returns an unmodifiable set of names of non-stored docValues fields, except those that are
 * targets of a copy field.
 */
public Set<String> getNonStoredDVsWithoutCopyTargets() {
return nonStoredDVsWithoutCopyTargets;
}
/**
 * Moved as a private class here, we consider it an implementation detail. It should not be
 * exposed outside of this class.
 *
 * <p>This class is in charge of ensuring that SolrDocuments can have their fields populated
 * during a request in the most efficient way possible. See the comments at {@link #solrDoc(int
 * docId, SolrReturnFields solrReturnFields)}
 */
class RetrieveFieldsOptimizer {
// null means get all available stored fields
private final Set<String> storedFields;
// always non null
private final Set<String> dvFields;
private final SolrReturnFields solrReturnFields;
// shared DV iterator cache; null when no DV fields need to be fetched
private final DocValuesIteratorCache reuseDvIters;
RetrieveFieldsOptimizer(SolrReturnFields solrReturnFields) {
this.storedFields = calcStoredFieldsForReturn(solrReturnFields);
this.dvFields = calcDocValueFieldsForReturn(solrReturnFields);
this.solrReturnFields = solrReturnFields;
// if every requested stored field can also be served from docValues, shift them all to the
// DV path and skip the stored-document read entirely (storedFields becomes empty, not null)
if (storedFields != null && dvsCanSubstituteStored.containsAll(storedFields)) {
dvFields.addAll(storedFields);
storedFields.clear();
}
reuseDvIters = dvFields.isEmpty() ? null : new DocValuesIteratorCache(searcher);
}
/**
 * Sometimes we could fetch a field value from either the stored document or docValues. Such
 * fields have both and are single-valued. If choosing docValues allows us to avoid accessing
 * the stored document altogether for all fields to be returned then we do it, otherwise we
 * prefer the stored value when we have a choice.
 */
private boolean returnStoredFields() {
// null means "all stored fields"; an empty set means "none" (all shifted to DV)
return !(storedFields != null && storedFields.isEmpty());
}
private boolean returnDVFields() {
return !dvFields.isEmpty();
}
private Set<String> getStoredFields() {
return storedFields;
}
private Set<String> getDvFields() {
return dvFields;
}
// who uses all of these?
private ReturnFields getReturnFields() {
return solrReturnFields;
}
// Computes which fields must be read from the stored document (null = all stored fields).
private Set<String> calcStoredFieldsForReturn(ReturnFields returnFields) {
final Set<String> storedFields = new HashSet<>();
Set<String> fnames = returnFields.getLuceneFieldNames();
if (returnFields.wantsAllFields()) {
return null;
} else if (returnFields.hasPatternMatching()) {
for (String s : getAllStored()) {
if (returnFields.wantsField(s)) {
storedFields.add(s);
}
}
} else if (fnames != null) {
storedFields.addAll(fnames);
storedFields.removeIf(
(String name) -> {
SchemaField schemaField = searcher.getSchema().getFieldOrNull(name);
if (schemaField == null) {
// Get it from the stored fields if, for some reason, we can't get the schema.
return false;
}
if (schemaField.stored() && schemaField.multiValued()) {
// must return multivalued fields from stored data if possible.
return false;
}
if (schemaField.stored() == false) {
// if it's not stored, no choice but to return from DV.
return true;
}
return false;
});
}
// "score" is a pseudo-field, never read from stored data
storedFields.remove(SolrReturnFields.SCORE);
return storedFields;
}
// Computes which fields must be fetched from docValues (never returns null).
private Set<String> calcDocValueFieldsForReturn(ReturnFields returnFields) {
// always return not null
final Set<String> result = new HashSet<>();
if (returnFields.wantsAllFields()) {
result.addAll(getNonStoredDVs(true));
// check whether there are no additional fields
Set<String> fieldNames = returnFields.getLuceneFieldNames(true);
if (fieldNames != null) {
// add all requested fields that may be useDocValuesAsStored=false
for (String fl : fieldNames) {
if (getNonStoredDVs(false).contains(fl)) {
result.add(fl);
}
}
}
} else if (returnFields.hasPatternMatching()) {
for (String s : getNonStoredDVs(true)) {
if (returnFields.wantsField(s)) {
result.add(s);
}
}
} else {
Set<String> fnames = returnFields.getLuceneFieldNames();
if (fnames != null) {
result.addAll(fnames);
// here we get all non-stored dv fields because even if a user has set
// useDocValuesAsStored=false in schema, he may have requested a field
// explicitly using the fl parameter
result.retainAll(getNonStoredDVs(false));
}
}
return result;
}
// Builds the SolrDocument: stored fields first (when needed), then DV decoration.
private SolrDocument getSolrDoc(int luceneDocId) {
SolrDocument sdoc = null;
try {
if (returnStoredFields()) {
Document doc = doc(luceneDocId, getStoredFields());
// make sure to use the schema from the searcher and not the request (cross-core)
sdoc =
DocsStreamer.convertLuceneDocToSolrDoc(doc, searcher.getSchema(), getReturnFields());
if (returnDVFields() == false) {
solrReturnFields.setFieldSources(SolrReturnFields.FIELD_SOURCES.ALL_FROM_STORED);
return sdoc;
} else {
solrReturnFields.setFieldSources(SolrReturnFields.FIELD_SOURCES.MIXED_SOURCES);
}
} else {
// no need to get stored fields of the document, see SOLR-5968
sdoc = new SolrDocument();
solrReturnFields.setFieldSources(SolrReturnFields.FIELD_SOURCES.ALL_FROM_DV);
}
// decorate the document with non-stored docValues fields
if (returnDVFields()) {
decorateDocValueFields(sdoc, luceneDocId, getDvFields(), reuseDvIters);
}
} catch (IOException e) {
throw new SolrException(
SolrException.ErrorCode.SERVER_ERROR,
"Error reading document with docId " + luceneDocId,
e);
}
return sdoc;
}
}
}
|
apache/felix-dev | 35,359 | http/jetty/src/main/java/org/apache/felix/http/jetty/internal/ConfigMetaTypeProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.http.jetty.internal;
import java.io.InputStream;
import java.util.ArrayList;
import org.apache.felix.http.base.internal.HttpConfig;
import org.apache.felix.http.base.internal.logger.SystemLogger;
import org.eclipse.jetty.server.CustomRequestLog;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.server.session.HouseKeeper;
import org.osgi.framework.Bundle;
import org.osgi.service.metatype.AttributeDefinition;
import org.osgi.service.metatype.MetaTypeProvider;
import org.osgi.service.metatype.ObjectClassDefinition;
class ConfigMetaTypeProvider implements MetaTypeProvider
{
private final Bundle bundle;
/**
 * Creates a meta type provider describing the Jetty HTTP service configuration.
 *
 * @param bundle the bundle whose context supplies framework-property defaults for attributes
 */
public ConfigMetaTypeProvider(final Bundle bundle)
{
this.bundle = bundle;
}
/**
 * @see org.osgi.service.metatype.MetaTypeProvider#getLocales()
 */
@Override
public String[] getLocales()
{
// no localization support: metadata is only provided in the default locale
return null;
}
/**
 * Builds the metatype {@link ObjectClassDefinition} that describes every configuration
 * property supported by the Jetty based Http Service. For each attribute the declared
 * default may be overridden by a framework property of the same name read from the
 * bundle context.
 *
 * @param id the PID being described; only {@link JettyService#PID} is supported
 * @param locale ignored, descriptions are English only
 * @return the definition for the Jetty service PID, or {@code null} for any other id
 * @see org.osgi.service.metatype.MetaTypeProvider#getObjectClassDefinition(java.lang.String, java.lang.String)
 */
@Override
public ObjectClassDefinition getObjectClassDefinition( String id, String locale )
{
    if ( !JettyService.PID.equals( id ) )
    {
        return null;
    }
    final ArrayList<AttributeDefinition> adList = new ArrayList<>();
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HOST,
            "Host Name",
            "IP Address or Host Name of the interface to which HTTP and HTTPS bind. The default is " +
            "\"0.0.0.0\" indicating all interfaces.",
            "0.0.0.0",
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HOST)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_ENABLE,
            "Enable HTTP",
            "Whether or not HTTP is enabled. Defaults to true thus HTTP enabled.",
            true,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.HTTP_PORT,
            "HTTP Port",
            "Port to listen on for HTTP requests. Defaults to 8080.",
            8080,
            bundle.getBundleContext().getProperty(JettyConfig.HTTP_PORT)));
    // FIX: description typos ("an timeout", "HTTP and HTTP") corrected.
    adList.add(new AttributeDefinitionImpl(JettyConfig.HTTP_TIMEOUT,
            "Connection Timeout",
            "Time limit for reaching a timeout specified in milliseconds. This property applies to both HTTP and HTTPS connections. Defaults to 60 seconds.",
            60000,
            bundle.getBundleContext().getProperty(JettyConfig.HTTP_TIMEOUT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTPS_ENABLE,
            "Enable HTTPS",
            "Whether or not HTTPS is enabled. Defaults to false thus HTTPS disabled.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTPS_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.HTTPS_PORT,
            "HTTPS Port",
            "Port to listen on for HTTPS requests. Defaults to 443.",
            443,
            bundle.getBundleContext().getProperty(JettyConfig.HTTPS_PORT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_KEYSTORE,
            "Keystore",
            "Absolute Path to the Keystore to use for HTTPS. Only used if HTTPS is enabled in which case this property is required.",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_KEYSTORE)));
    // Password attributes use the three-argument constructor (type PASSWORD, no default).
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_KEYSTORE_PASSWORD,
            "Keystore Password",
            "Password to access the Keystore. Only used if HTTPS is enabled."));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_KEYSTORE_KEY_PASSWORD,
            "Key Password",
            "Password to unlock the secret key from the Keystore. Only used if HTTPS is enabled."));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_TRUSTSTORE,
            "Truststore",
            "Absolute Path to the Truststore to use for HTTPS. Only used if HTTPS is enabled.",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_TRUSTSTORE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_TRUSTSTORE_PASSWORD,
            "Truststore Password",
            "Password to access the Truststore. Only used if HTTPS is enabled."));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTPS_CLIENT_CERT,
            "Client Certificate",
            "Requirement for the Client to provide a valid certificate. Defaults to none.",
            AttributeDefinition.STRING,
            new String[] {"none"},
            0,
            new String[] {"No Client Certificate", "Client Certificate Wanted", "Client Certificate Needed"},
            new String[] {"none", "wants", "needs"},
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTPS_CLIENT_CERT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_CONTEXT_PATH,
            "Context Path",
            "The Servlet Context Path to use for the Http Service. If this property is not configured it " +
            "defaults to \"/\". This must be a valid path starting with a slash and not ending with a slash (unless it is the root context).",
            "/",
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_CONTEXT_PATH)));
    // FIX: description typo ("to use register" -> "to register").
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_MBEANS,
            "Register MBeans",
            "Whether or not to register JMX MBeans from the servlet container (Jetty). If this is " +
            "enabled Jetty Request and Connector statistics are also added. The default is to not enable JMX.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_MBEANS)));
    // FIX: description typo ("to use enable" -> "to enable").
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_STATISTICS_HANDLER_ENABLE,
            "Enable Statistics",
            "Whether or not to enable Statistics in the servlet container (Jetty).",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_STATISTICS_HANDLER_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_SESSION_TIMEOUT,
            "Session Timeout",
            "Default lifetime of an HTTP session specified in a whole number of minutes. If the timeout is 0 or less, sessions will by default never timeout. The default is 0.",
            0,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_SESSION_TIMEOUT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_THREADPOOL_MAX,
            "Thread Pool Max",
            "Maximum number of jetty threads. Using the default -1 uses Jetty's default (200).",
            -1,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_THREADPOOL_MAX)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_ACCEPTORS,
            "Acceptors",
            "Number of acceptor threads to use, or -1 for a default value. Acceptors accept new TCP/IP connections. If 0, then the selector threads are used to accept connections.",
            -1,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_ACCEPTORS)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SELECTORS,
            "Selectors",
            "Number of selector threads, or <=0 for a default value. Selectors notice and schedule established connection that can make IO progress.",
            -1,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SELECTORS)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_HEADER_BUFFER_SIZE,
            "Header Buffer Size",
            "Size of the buffer for request and response headers. Default is 16KB.",
            16384,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_HEADER_BUFFER_SIZE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_REQUEST_BUFFER_SIZE,
            "Request Buffer Size",
            "Size of the buffer for requests not fitting the header buffer. Default is 8KB.",
            8192,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_REQUEST_BUFFER_SIZE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_RESPONSE_BUFFER_SIZE,
            "Response Buffer Size",
            "Size of the buffer for responses. Default is 24KB.",
            24576,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_RESPONSE_BUFFER_SIZE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_MAX_FORM_SIZE,
            "Maximum Form Size",
            "Size of Body for submitted form content. Default is 200KB.",
            204800,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_MAX_FORM_SIZE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_PATH_EXCLUSIONS,
            "Path Exclusions",
            "Contains a list of context path prefixes. If a Web Application Bundle is started with a context path matching any " +
            "of these prefixes, it will not be deployed in the servlet container.",
            AttributeDefinition.STRING,
            new String[] {"/system"},
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_PATH_EXCLUSIONS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_EXCLUDED_SUITES,
            "Excluded Cipher Suites",
            "List of cipher suites that should be excluded. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_EXCLUDED_SUITES))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_INCLUDED_SUITES,
            "Included Cipher Suites",
            "List of cipher suites that should be included. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_INCLUDED_SUITES))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SEND_SERVER_HEADER,
            "Send Server Header",
            "If enabled, the server header is sent.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SEND_SERVER_HEADER)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_INCLUDED_PROTOCOLS,
            "Included Protocols",
            "List of SSL protocols to include by default. Protocols may be any supported by the Java " +
            "platform such as SSLv2Hello, SSLv3, TLSv1, TLSv1.1, or TLSv1.2. Any listed protocol " +
            "not supported is silently ignored. Default is none assuming to use any protocol enabled " +
            "and supported on the platform.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_INCLUDED_PROTOCOLS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_EXCLUDED_PROTOCOLS,
            "Excluded Protocols",
            "List of SSL protocols to exclude. This property further restricts the enabled protocols by " +
            "explicitly disabling. Any protocol listed in both this property and the Included " +
            "protocols property is excluded. Default is none such as to accept all protocols enabled " +
            "on platform or explicitly listed by the Included protocols property.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_EXCLUDED_PROTOCOLS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_PROXY_LOAD_BALANCER_CONNECTION_ENABLE,
            "Enable Proxy/Load Balancer Connection",
            "Whether or not the Proxy/Load Balancer Connection is enabled. Defaults to false thus disabled.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_PROXY_LOAD_BALANCER_CONNECTION_ENABLE)));
    // NOTE(review): the description claims "true by default" but the metatype default
    // declared here is false — confirm against JettyConfig's actual runtime default
    // and align one of the two.
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_RENEGOTIATION_ALLOWED,
            "Renegotiation allowed",
            "Whether TLS renegotiation is allowed (true by default)",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_RENEGOTIATION_ALLOWED)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SESSION_COOKIE_HTTP_ONLY,
            "Session Cookie httpOnly",
            "Session Cookie httpOnly (true by default)",
            true,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SESSION_COOKIE_HTTP_ONLY)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SESSION_COOKIE_SECURE,
            "Session Cookie secure",
            "Session Cookie secure (false by default)",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SESSION_COOKIE_SECURE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_URI_COMPLIANCE_MODE,
            "Jetty URI compliance mode",
            "Jetty URI compliance mode (if not set, Jetty will configure a default)",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_URI_COMPLIANCE_MODE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SERVLET_SESSION_ID_PATH_PARAMETER_NAME,
            "Session Id path parameter",
            "Defaults to jsessionid. If set to null or \"none\" no URL rewriting will be done.",
            "jsessionid",
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_ID_PATH_PARAMETER_NAME)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SERVLET_CHECK_REMOTE_SESSION_ENCODING,
            "Check remote session encoding",
            "If true, Jetty will add JSESSIONID parameter even when encoding external urls with calls to encodeURL() (true by default)",
            true,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SERVLET_CHECK_REMOTE_SESSION_ENCODING)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SERVLET_SESSION_COOKIE_NAME,
            "Session Cookie Name",
            "Session Cookie Name",
            "JSESSIONID",
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_COOKIE_NAME)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SERVLET_SESSION_DOMAIN,
            "Session Domain",
            "If this property is set, then it is used as the domain for session cookies. If it is not set, then no domain is specified for the session cookie. Default is none.",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_DOMAIN)));
    // FIX: the override value was read from FELIX_JETTY_SERVLET_SESSION_DOMAIN
    // (copy-paste from the previous attribute); it must use the SESSION_PATH property.
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SERVLET_SESSION_PATH,
            "Session Path",
            "If this property is set, then it is used as the path for the session cookie. Default is context path.",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_PATH)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SERVLET_SESSION_MAX_AGE,
            "Session Max Age",
            "Max age for the session cookie. Default is -1.",
            -1,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SERVLET_SESSION_MAX_AGE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_SESSION_SCAVENGING_INTERVAL,
            "Session Scavenging Interval",
            "Interval of session scavenging in seconds. Default is " + String.valueOf(HouseKeeper.DEFAULT_PERIOD_MS / 1000),
            HouseKeeper.DEFAULT_PERIOD_MS / 1000,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_SESSION_SCAVENGING_INTERVAL)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_SERVICE_NAME,
            "HTTP Service Name",
            "HTTP Service Name used in service filter to target specific HTTP instance. Default is null.",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_SERVICE_NAME)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_HANDLER_ENABLE,
            "Enable GzipHandler",
            "Whether the server should use a server-wide gzip handler. Default is false.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_HANDLER_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_MIN_GZIP_SIZE,
            "Gzip Min Size",
            String.format("The minimum response size to trigger dynamic compression. Default is %d.", GzipHandler.DEFAULT_MIN_GZIP_SIZE),
            GzipHandler.DEFAULT_MIN_GZIP_SIZE,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_MIN_GZIP_SIZE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_INFLATE_BUFFER_SIZE,
            "Gzip Inflate Buffer Size",
            "The size in bytes of the buffer to inflate compressed request, or <= 0 for no inflation. Default is -1.",
            -1,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_INFLATE_BUFFER_SIZE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_SYNC_FLUSH,
            "Gzip Sync Flush",
            "True if Deflater#SYNC_FLUSH should be used, else Deflater#NO_FLUSH will be used. Default is false.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_SYNC_FLUSH)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_INCLUDED_METHODS,
            "Gzip Include Methods",
            "The additional http methods to include in compression. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_INCLUDED_METHODS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_EXCLUDED_METHODS,
            "Gzip Exclude Methods",
            "The additional http methods to exclude in compression. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_EXCLUDED_METHODS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_INCLUDED_PATHS,
            "Gzip Included Paths",
            "The additional path specs to include. Inclusion takes precedence over exclusion. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_INCLUDED_PATHS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_EXCLUDED_PATHS,
            "Gzip Excluded Paths",
            "The additional path specs to exclude. Inclusion takes precedence over exclusion. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_EXCLUDED_PATHS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_INCLUDED_MIME_TYPES,
            "Gzip Included Mime Types",
            "The included mime types. Inclusion takes precedence over exclusion. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_INCLUDED_MIME_TYPES))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_GZIP_EXCLUDED_MIME_TYPES,
            "Gzip Excluded Mime Types",
            "The excluded mime types. Inclusion takes precedence over exclusion. Default is none.",
            AttributeDefinition.STRING,
            null,
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_GZIP_EXCLUDED_MIME_TYPES))));
    // FIX: description said "validated" for a property that *invalidates* the session.
    adList.add(new AttributeDefinitionImpl(HttpConfig.PROP_INVALIDATE_SESSION,
            "Invalidate Container Session",
            "If this property is set, the container session is automatically invalidated.",
            HttpConfig.DEFAULT_INVALIDATE_SESSION,
            bundle.getBundleContext().getProperty(HttpConfig.PROP_INVALIDATE_SESSION)));
    adList.add(new AttributeDefinitionImpl(HttpConfig.PROP_CONTAINER_ADDED_ATTRIBUTE,
            "Attributes added by server.",
            "The attributes added by underlying session. Use this to invalidate session.",
            AttributeDefinition.STRING,
            new String[] {"org.eclipse.jetty.security.sessionCreatedSecure"},
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(HttpConfig.PROP_CONTAINER_ADDED_ATTRIBUTE))));
    adList.add(new AttributeDefinitionImpl(HttpConfig.PROP_UNIQUE_SESSION_ID,
            "Unique Session Id",
            "If this property is set, each http context gets a unique session id (derived from the container session).",
            HttpConfig.DEFAULT_UNIQUE_SESSION_ID,
            bundle.getBundleContext().getProperty(HttpConfig.PROP_UNIQUE_SESSION_ID)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_STOP_TIMEOUT, "Server stop timeout",
            "If not -1, stop timeout for the server in milliseconds.", -1L,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_STOP_TIMEOUT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP2_ENABLE,
            "Enable Http/2",
            "Whether to enable HTTP/2. Default is false.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP2_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_HTTP2_MAX_CONCURRENT_STREAMS,
            "Http/2 Max Concurrent Streams",
            "The max number of concurrent streams per connection. Default is 128.",
            128,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_HTTP2_MAX_CONCURRENT_STREAMS)));
    // FIX: "Recieve" -> "Receive" in the two HTTP/2 window attribute names.
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_HTTP2_INITIAL_STREAM_RECV_WINDOW,
            "Http/2 Initial Stream Receive Window",
            "The initial stream receive window (client to server). Default is 524288.",
            524288,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_HTTP2_INITIAL_STREAM_RECV_WINDOW)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_HTTP2_INITIAL_SESSION_RECV_WINDOW,
            "Http/2 Initial Session Receive Window",
            "The initial session receive window (client to server). Default is 1048576.",
            1048576,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_HTTP2_INITIAL_SESSION_RECV_WINDOW)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_ALPN_PROTOCOLS,
            "ALPN Protocols",
            "The ALPN protocols to consider. Default is h2, http/1.1.",
            AttributeDefinition.STRING,
            new String[] {"h2", "http/1.1"},
            2147483647,
            null, null,
            getStringArray(bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_ALPN_PROTOCOLS))));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_ALPN_DEFAULT_PROTOCOL,
            "ALPN Default Protocol",
            "The default protocol when negotiation fails. Default is http/1.1.",
            "http/1.1",
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_ALPN_DEFAULT_PROTOCOL)));
    // most important request logging attributes
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_REQUEST_LOG_FILE_PATH,
            "Request Log File Path",
            "The path to the log file which is receiving request log entries. If empty no request log file is created",
            null,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_REQUEST_LOG_FILE_PATH)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_REQUEST_LOG_FILE_FORMAT,
            "Request Log File Format",
            "The format of the request log file entries. Only relevant if 'Request Log File Path' is set. Valid placeholders are described in https://www.eclipse.org/jetty/documentation/jetty-11/operations-guide/index.html#og-module-requestlog",
            CustomRequestLog.NCSA_FORMAT,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_REQUEST_LOG_FILE_FORMAT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_REQUEST_LOG_OSGI_ENABLE,
            "Enable SLF4J Request Logging",
            "Select to log requests through SLF4J logger with given name (on level INFO)",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_REQUEST_LOG_OSGI_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_REQUEST_LOG_OSGI_LOGGER_NAME,
            "SLF4J Request Log Logger Name",
            "The name of the SLF4J request logger. Only relevant if 'Enable SLF4J Request Logging' is checked.",
            SystemLogger.LOGGER.getName(),
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_REQUEST_LOG_OSGI_LOGGER_NAME)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_HTTP_REQUEST_LOG_FORMAT,
            "SLF4J Request Log Format",
            "The format of the request log entries. Only relevant if 'Enable SLF4J Request Logging' is checked. Valid placeholders are described in https://www.eclipse.org/jetty/documentation/jetty-11/operations-guide/index.html#og-module-requestlog",
            CustomRequestLog.NCSA_FORMAT,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_HTTP_REQUEST_LOG_FORMAT)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JAKARTA_WEBSOCKET_ENABLE,
            "Enable Jakarta standard WebSocket support",
            "Whether to enable jakarta standard WebSocket support. Default is false.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JAKARTA_WEBSOCKET_ENABLE)));
    adList.add(new AttributeDefinitionImpl(JettyConfig.FELIX_JETTY_WEBSOCKET_ENABLE,
            "Enable Jetty specific WebSocket support",
            "Whether to enable jetty specific WebSocket support. Default is false.",
            false,
            bundle.getBundleContext().getProperty(JettyConfig.FELIX_JETTY_WEBSOCKET_ENABLE)));
    // Anonymous OCD snapshotting the attribute list built above.
    return new ObjectClassDefinition()
    {
        private final AttributeDefinition[] attrs = adList
                .toArray(new AttributeDefinition[adList.size()]);
        @Override
        public String getName()
        {
            return "Apache Felix Jetty Based Http Service";
        }
        @Override
        public InputStream getIcon(int arg0)
        {
            // No icon resource is provided.
            return null;
        }
        @Override
        public String getID()
        {
            return JettyService.PID;
        }
        @Override
        public String getDescription()
        {
            return "Configuration for the embedded Jetty Servlet Container.";
        }
        @Override
        public AttributeDefinition[] getAttributeDefinitions(int filter)
        {
            // All attributes are required-or-all; none are marked optional.
            return (filter == OPTIONAL) ? null : attrs;
        }
    };
}
/**
 * Splits a comma separated property value into its parts.
 *
 * @param value the raw framework property value, may be {@code null}
 * @return the comma separated entries (the whole value is trimmed first, the
 *         individual entries are not), or {@code null} if no value was given
 */
private String [] getStringArray(final String value)
{
    return value == null ? null : value.trim().split(",");
}
/**
 * Simple {@link AttributeDefinition} describing a single configuration property.
 * When an override value (usually a framework property) is supplied it takes the
 * place of the declared default value.
 */
private static class AttributeDefinitionImpl implements AttributeDefinition
{
    private final String id;
    private final String name;
    private final String description;
    private final int type;
    private final String[] defaultValues;
    private final int cardinality;
    private final String[] optionLabels;
    private final String[] optionValues;
    /**
     * Constructor for password properties
     * @param id The id of the property
     * @param name The property name
     * @param description The property description
     */
    AttributeDefinitionImpl( final String id, final String name, final String description )
    {
        this( id, name, description, PASSWORD, (String[])null, 0, null, null, (String[])null );
    }
    /** Single-valued string property. */
    AttributeDefinitionImpl( final String id, final String name, final String description, final String defaultValue, final String overrideValue )
    {
        this( id, name, description, STRING,
                defaultValue == null ? null : new String[] { defaultValue },
                0, null, null,
                overrideValue == null ? null : new String[] { overrideValue } );
    }
    /** Single-valued long property. */
    AttributeDefinitionImpl( final String id, final String name, final String description, final long defaultValue, final String overrideValue )
    {
        this( id, name, description, LONG,
                new String[] { String.valueOf(defaultValue) },
                0, null, null,
                overrideValue == null ? null : new String[] { overrideValue } );
    }
    /** Single-valued integer property. */
    AttributeDefinitionImpl( final String id, final String name, final String description, final int defaultValue, final String overrideValue )
    {
        this( id, name, description, INTEGER,
                new String[] { String.valueOf(defaultValue) },
                0, null, null,
                overrideValue == null ? null : new String[] { overrideValue } );
    }
    /** Single-valued boolean property. */
    AttributeDefinitionImpl( final String id, final String name, final String description, final boolean defaultValue, final String overrideValue )
    {
        this( id, name, description, BOOLEAN,
                new String[] { String.valueOf(defaultValue) },
                0, null, null,
                overrideValue == null ? null : new String[] { overrideValue } );
    }
    /** Fully specified property with a single (possibly absent) override value. */
    AttributeDefinitionImpl( final String id, final String name, final String description, final int type,
            final String[] defaultValues, final int cardinality, final String[] optionLabels,
            final String[] optionValues,
            final String overrideValue)
    {
        this(id, name, description, type, defaultValues, cardinality, optionLabels, optionValues,
                overrideValue == null ? null : new String[] { overrideValue });
    }
    /** Canonical constructor; override values, when present, replace the defaults. */
    AttributeDefinitionImpl( final String id, final String name, final String description, final int type,
            final String[] defaultValues, final int cardinality, final String[] optionLabels,
            final String[] optionValues,
            final String[] overrideValues)
    {
        this.id = id;
        this.name = name;
        this.description = description;
        this.type = type;
        this.defaultValues = overrideValues != null ? overrideValues : defaultValues;
        this.cardinality = cardinality;
        this.optionLabels = optionLabels;
        this.optionValues = optionValues;
    }
    @Override
    public String getID()
    {
        return id;
    }
    @Override
    public String getName()
    {
        return name;
    }
    @Override
    public String getDescription()
    {
        return description;
    }
    @Override
    public int getType()
    {
        return type;
    }
    @Override
    public String[] getDefaultValue()
    {
        return defaultValues;
    }
    @Override
    public int getCardinality()
    {
        return cardinality;
    }
    @Override
    public String[] getOptionLabels()
    {
        return optionLabels;
    }
    @Override
    public String[] getOptionValues()
    {
        return optionValues;
    }
    @Override
    public String validate( String arg0 )
    {
        // No validation is performed; any value is accepted.
        return null;
    }
}
}
|
googleads/google-ads-java | 35,400 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/common/CriterionCategoryLocaleAvailability.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/common/criterion_category_availability.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.common;
/**
* <pre>
* Information about which locales a category is available in.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability}
*/
public final class CriterionCategoryLocaleAvailability extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability)
CriterionCategoryLocaleAvailabilityOrBuilder {
private static final long serialVersionUID = 0L;
// Use CriterionCategoryLocaleAvailability.newBuilder() to construct.
private CriterionCategoryLocaleAvailability(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CriterionCategoryLocaleAvailability() {
availabilityMode_ = 0;
countryCode_ = "";
languageCode_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CriterionCategoryLocaleAvailability();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v19_common_CriterionCategoryLocaleAvailability_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v19_common_CriterionCategoryLocaleAvailability_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.class, com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.Builder.class);
}
private int bitField0_;
public static final int AVAILABILITY_MODE_FIELD_NUMBER = 1;
private int availabilityMode_ = 0;
/**
* <pre>
* Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
* language will be empty), COUNTRY (only country will be set), LANGUAGE (only
* language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
* be set).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
* @return The enum numeric value on the wire for availabilityMode.
*/
@java.lang.Override public int getAvailabilityModeValue() {
return availabilityMode_;
}
/**
* <pre>
* Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
* language will be empty), COUNTRY (only country will be set), LANGUAGE (only
* language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
* be set).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
* @return The availabilityMode.
*/
@java.lang.Override public com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode getAvailabilityMode() {
com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode result = com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.forNumber(availabilityMode_);
return result == null ? com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNRECOGNIZED : result;
}
  public static final int COUNTRY_CODE_FIELD_NUMBER = 4;
  // Lazy string field: holds either a decoded java.lang.String or the raw
  // ByteString read off the wire; volatile so the cached decode is safely
  // published across threads. (protoc-generated — do not hand-edit.)
  @SuppressWarnings("serial")
  private volatile java.lang.Object countryCode_ = "";
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return Whether the countryCode field is set.
   */
  @java.lang.Override
  public boolean hasCountryCode() {
    // Presence of this optional field is tracked by bit 0x1 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return The countryCode.
   */
  @java.lang.Override
  public java.lang.String getCountryCode() {
    java.lang.Object ref = countryCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 bytes once and cache the String.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      countryCode_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return The bytes for countryCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCountryCodeBytes() {
    java.lang.Object ref = countryCode_;
    if (ref instanceof java.lang.String) {
      // Inverse of getCountryCode(): cache the UTF-8 encoding of the String form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      countryCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LANGUAGE_CODE_FIELD_NUMBER = 5;
  // Same lazy String/ByteString caching pattern as countryCode_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object languageCode_ = "";
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return Whether the languageCode field is set.
   */
  @java.lang.Override
  public boolean hasLanguageCode() {
    // Presence of this optional field is tracked by bit 0x2 of bitField0_.
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return The languageCode.
   */
  @java.lang.Override
  public java.lang.String getLanguageCode() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once from the wire bytes, then serve the cached String.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      languageCode_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return The bytes for languageCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getLanguageCodeBytes() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      languageCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; default-valued enum (0) and
  // unset optional strings are skipped per proto3 encoding rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (availabilityMode_ != com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNSPECIFIED.getNumber()) {
      output.writeEnum(1, availabilityMode_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, countryCode_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, languageCode_);
    }
    getUnknownFields().writeTo(output);
  }
  // Size computation mirrors writeTo() exactly; the result is memoized in
  // memoizedSize (-1 means "not computed yet").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (availabilityMode_ != com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, availabilityMode_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, countryCode_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, languageCode_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality: enum value, presence + value of each optional
  // string, and unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability other = (com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability) obj;
    if (availabilityMode_ != other.availabilityMode_) return false;
    if (hasCountryCode() != other.hasCountryCode()) return false;
    if (hasCountryCode()) {
      if (!getCountryCode()
          .equals(other.getCountryCode())) return false;
    }
    if (hasLanguageCode() != other.hasLanguageCode()) return false;
    if (hasLanguageCode()) {
      if (!getLanguageCode()
          .equals(other.getLanguageCode())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes descriptor, set fields (keyed by field number), and unknown
  // fields; memoized since the message is immutable. Consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + AVAILABILITY_MODE_FIELD_NUMBER;
    hash = (53 * hash) + availabilityMode_;
    if (hasCountryCode()) {
      hash = (37 * hash) + COUNTRY_CODE_FIELD_NUMBER;
      hash = (53 * hash) + getCountryCode().hashCode();
    }
    if (hasLanguageCode()) {
      hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
      hash = (53 * hash) + getLanguageCode().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All byte-oriented overloads delegate
  // to PARSER; stream overloads go through GeneratedMessageV3 helpers so that
  // IOExceptions are propagated rather than wrapped.
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods. toBuilder() avoids a redundant mergeFrom when
  // called on the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Information about which locales a category is available in.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability)
      com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailabilityOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v19_common_CriterionCategoryLocaleAvailability_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v19_common_CriterionCategoryLocaleAvailability_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.class, com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and restore each field's default value.
      bitField0_ = 0;
      availabilityMode_ = 0;
      countryCode_ = "";
      languageCode_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v19_common_CriterionCategoryLocaleAvailability_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability build() {
      com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability buildPartial() {
      com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability result = new com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set builder fields into the message. Note the bit remap: builder
    // bits (0x1 mode, 0x2 country, 0x4 language) become message presence bits
    // (0x1 country, 0x2 language); the enum carries no presence bit on the message.
    private void buildPartial0(com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.availabilityMode_ = availabilityMode_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.countryCode_ = countryCode_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.languageCode_ = languageCode_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability) {
        return mergeFrom((com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Standard proto merge semantics: fields set on `other` overwrite this
    // builder's values; unset fields on `other` are left untouched.
    public Builder mergeFrom(com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability other) {
      if (other == com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability.getDefaultInstance()) return this;
      if (other.availabilityMode_ != 0) {
        setAvailabilityModeValue(other.getAvailabilityModeValue());
      }
      if (other.hasCountryCode()) {
        countryCode_ = other.countryCode_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLanguageCode()) {
        languageCode_ = other.languageCode_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on each tag (field_number << 3 | wire_type);
    // tags 8/34/42 are fields 1/4/5, tag 0 is end-of-stream, anything else goes
    // to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              availabilityMode_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 34: {
              countryCode_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              languageCode_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private int availabilityMode_ = 0;
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return The enum numeric value on the wire for availabilityMode.
     */
    @java.lang.Override public int getAvailabilityModeValue() {
      return availabilityMode_;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @param value The enum numeric value on the wire for availabilityMode to set.
     * @return This builder for chaining.
     */
    public Builder setAvailabilityModeValue(int value) {
      availabilityMode_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return The availabilityMode.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode getAvailabilityMode() {
      com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode result = com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.forNumber(availabilityMode_);
      // forNumber returns null for wire values unknown to this enum version.
      return result == null ? com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @param value The availabilityMode to set.
     * @return This builder for chaining.
     */
    public Builder setAvailabilityMode(com.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      availabilityMode_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearAvailabilityMode() {
      bitField0_ = (bitField0_ & ~0x00000001);
      availabilityMode_ = 0;
      onChanged();
      return this;
    }
    // Builder-side lazy string field; see the message-level countryCode_ for the pattern.
    private java.lang.Object countryCode_ = "";
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return Whether the countryCode field is set.
     */
    public boolean hasCountryCode() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return The countryCode.
     */
    public java.lang.String getCountryCode() {
      java.lang.Object ref = countryCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        countryCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return The bytes for countryCode.
     */
    public com.google.protobuf.ByteString
        getCountryCodeBytes() {
      java.lang.Object ref = countryCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        countryCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @param value The countryCode to set.
     * @return This builder for chaining.
     */
    public Builder setCountryCode(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      countryCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearCountryCode() {
      countryCode_ = getDefaultInstance().getCountryCode();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @param value The bytes for countryCode to set.
     * @return This builder for chaining.
     */
    public Builder setCountryCodeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      countryCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object languageCode_ = "";
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return Whether the languageCode field is set.
     */
    public boolean hasLanguageCode() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return The languageCode.
     */
    public java.lang.String getLanguageCode() {
      java.lang.Object ref = languageCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        languageCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return The bytes for languageCode.
     */
    public com.google.protobuf.ByteString
        getLanguageCodeBytes() {
      java.lang.Object ref = languageCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        languageCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @param value The languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCode(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      languageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return This builder for chaining.
     */
    public Builder clearLanguageCode() {
      languageCode_ = getDefaultInstance().getLanguageCode();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @param value The bytes for languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCodeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      languageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability)
  // Singleton default instance shared by all callers; created eagerly at class load.
  private static final com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability();
  }
  public static com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to the builder's tag-dispatch mergeFrom; partial results
  // are attached to thrown InvalidProtocolBufferExceptions for diagnostics.
  private static final com.google.protobuf.Parser<CriterionCategoryLocaleAvailability>
      PARSER = new com.google.protobuf.AbstractParser<CriterionCategoryLocaleAvailability>() {
    @java.lang.Override
    public CriterionCategoryLocaleAvailability parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CriterionCategoryLocaleAvailability> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CriterionCategoryLocaleAvailability> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.common.CriterionCategoryLocaleAvailability getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v20/common/criterion_category_availability.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.common;
/**
* <pre>
* Information about which locales a category is available in.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability}
*/
public final class CriterionCategoryLocaleAvailability extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability)
CriterionCategoryLocaleAvailabilityOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CriterionCategoryLocaleAvailability.newBuilder() to construct.
  private CriterionCategoryLocaleAvailability(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private CriterionCategoryLocaleAvailability() {
    // Field defaults for the no-arg (default-instance) construction path.
    availabilityMode_ = 0;
    countryCode_ = "";
    languageCode_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CriterionCategoryLocaleAvailability();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v20_common_CriterionCategoryLocaleAvailability_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v20_common_CriterionCategoryLocaleAvailability_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.class, com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.Builder.class);
  }
  // Presence bits for the optional string fields (0x1 country, 0x2 language).
  private int bitField0_;
  public static final int AVAILABILITY_MODE_FIELD_NUMBER = 1;
  private int availabilityMode_ = 0;
  /**
   * <pre>
   * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
   * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
   * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
   * be set).
   * </pre>
   *
   * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
   * @return The enum numeric value on the wire for availabilityMode.
   */
  @java.lang.Override public int getAvailabilityModeValue() {
    return availabilityMode_;
  }
  /**
   * <pre>
   * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
   * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
   * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
   * be set).
   * </pre>
   *
   * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
   * @return The availabilityMode.
   */
  @java.lang.Override public com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode getAvailabilityMode() {
    com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode result = com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.forNumber(availabilityMode_);
    // forNumber returns null for wire values unknown to this enum version.
    return result == null ? com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNRECOGNIZED : result;
  }
  public static final int COUNTRY_CODE_FIELD_NUMBER = 4;
  // Lazy string field: holds either a decoded String or the raw ByteString off
  // the wire; volatile publishes the cached decode safely across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object countryCode_ = "";
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return Whether the countryCode field is set.
   */
  @java.lang.Override
  public boolean hasCountryCode() {
    // Presence tracked by bit 0x1 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return The countryCode.
   */
  @java.lang.Override
  public java.lang.String getCountryCode() {
    java.lang.Object ref = countryCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 bytes once and cache the String.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      countryCode_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return The bytes for countryCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCountryCodeBytes() {
    java.lang.Object ref = countryCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      countryCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LANGUAGE_CODE_FIELD_NUMBER = 5;
  // Same lazy String/ByteString caching pattern as countryCode_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object languageCode_ = "";
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return Whether the languageCode field is set.
   */
  @java.lang.Override
  public boolean hasLanguageCode() {
    // Presence tracked by bit 0x2 of bitField0_.
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return The languageCode.
   */
  @java.lang.Override
  public java.lang.String getLanguageCode() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      languageCode_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return The bytes for languageCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getLanguageCodeBytes() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      languageCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // -1 = not computed yet, 1 = initialized; this message has no required fields,
  // so isInitialized() always memoizes and returns true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: enum field 1 (only when
  // non-default, per proto3 semantics), optional strings 4 and 5 (only when
  // their presence bits are set), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (availabilityMode_ != com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNSPECIFIED.getNumber()) {
      output.writeEnum(1, availabilityMode_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, countryCode_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, languageCode_);
    }
    getUnknownFields().writeTo(output);
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Computes (and memoizes in memoizedSize) the wire size; must mirror the
  // field conditions in writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (availabilityMode_ != com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, availabilityMode_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, countryCode_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, languageCode_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Value equality over availability_mode, the two optional strings (presence
  // must match before values are compared), and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability other = (com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability) obj;
    if (availabilityMode_ != other.availabilityMode_) return false;
    if (hasCountryCode() != other.hasCountryCode()) return false;
    if (hasCountryCode()) {
      if (!getCountryCode()
          .equals(other.getCountryCode())) return false;
    }
    if (hasLanguageCode() != other.hasLanguageCode()) return false;
    if (hasLanguageCode()) {
      if (!getLanguageCode()
          .equals(other.getLanguageCode())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and folds in the descriptor,
  // availability_mode, and each string only when present — consistent with equals.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + AVAILABILITY_MODE_FIELD_NUMBER;
    hash = (53 * hash) + availabilityMode_;
    if (hasCountryCode()) {
      hash = (37 * hash) + COUNTRY_CODE_FIELD_NUMBER;
      hash = (53 * hash) + getCountryCode().hashCode();
    }
    if (hasLanguageCode()) {
      hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
      hash = (53 * hash) + getLanguageCode().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Standard generated parse entry points. All delegate to PARSER; the stream
  // variants route through GeneratedMessageV3 helpers so IOExceptions propagate
  // unchanged, and the "Delimited" variants read a length-prefixed message.
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Builder factory methods. toBuilder() returns a fresh empty Builder for the
  // default instance (cheap common case) and otherwise seeds one via mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Information about which locales a category is available in.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability}
   */
  // NOTE(review): generated protobuf Builder — do not hand-edit; regenerate from the .proto.
  // Builder-side presence bits: 0x1 = availability_mode, 0x2 = country_code,
  // 0x4 = language_code (these are remapped in buildPartial0, see below).
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability)
      com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailabilityOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v20_common_CriterionCategoryLocaleAvailability_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v20_common_CriterionCategoryLocaleAvailability_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.class, com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.Builder.class);
    }
    // Construct using com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      availabilityMode_ = 0;
      countryCode_ = "";
      languageCode_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v20_common_CriterionCategoryLocaleAvailability_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability build() {
      com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability buildPartial() {
      com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability result = new com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set fields into the message. Note the hasbit remap: builder bits
    // 0x2/0x4 (country/language) become message bits 0x1/0x2, because
    // availability_mode has no presence bit on the message side (proto3 enum).
    private void buildPartial0(com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.availabilityMode_ = availabilityMode_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.countryCode_ = countryCode_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.languageCode_ = languageCode_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability) {
        return mergeFrom((com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Standard protobuf merge semantics: fields set in `other` overwrite this
    // builder's values; unset fields in `other` are left alone.
    public Builder mergeFrom(com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability other) {
      if (other == com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability.getDefaultInstance()) return this;
      if (other.availabilityMode_ != 0) {
        setAvailabilityModeValue(other.getAvailabilityModeValue());
      }
      if (other.hasCountryCode()) {
        countryCode_ = other.countryCode_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLanguageCode()) {
        languageCode_ = other.languageCode_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire parsing loop. Tags: 8 = field 1 varint (enum), 34 = field 4
    // length-delimited (string), 42 = field 5 length-delimited (string);
    // anything else is preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              availabilityMode_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 34: {
              countryCode_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              languageCode_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private int availabilityMode_ = 0;
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return The enum numeric value on the wire for availabilityMode.
     */
    @java.lang.Override public int getAvailabilityModeValue() {
      return availabilityMode_;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @param value The enum numeric value on the wire for availabilityMode to set.
     * @return This builder for chaining.
     */
    public Builder setAvailabilityModeValue(int value) {
      availabilityMode_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return The availabilityMode.
     */
    @java.lang.Override
    public com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode getAvailabilityMode() {
      com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode result = com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.forNumber(availabilityMode_);
      return result == null ? com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @param value The availabilityMode to set.
     * @return This builder for chaining.
     */
    public Builder setAvailabilityMode(com.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      availabilityMode_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearAvailabilityMode() {
      bitField0_ = (bitField0_ & ~0x00000001);
      availabilityMode_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object countryCode_ = "";
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return Whether the countryCode field is set.
     */
    public boolean hasCountryCode() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return The countryCode.
     */
    public java.lang.String getCountryCode() {
      java.lang.Object ref = countryCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        countryCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return The bytes for countryCode.
     */
    public com.google.protobuf.ByteString
        getCountryCodeBytes() {
      java.lang.Object ref = countryCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        countryCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @param value The countryCode to set.
     * @return This builder for chaining.
     */
    public Builder setCountryCode(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      countryCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearCountryCode() {
      countryCode_ = getDefaultInstance().getCountryCode();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @param value The bytes for countryCode to set.
     * @return This builder for chaining.
     */
    public Builder setCountryCodeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      countryCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object languageCode_ = "";
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return Whether the languageCode field is set.
     */
    public boolean hasLanguageCode() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return The languageCode.
     */
    public java.lang.String getLanguageCode() {
      java.lang.Object ref = languageCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        languageCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return The bytes for languageCode.
     */
    public com.google.protobuf.ByteString
        getLanguageCodeBytes() {
      java.lang.Object ref = languageCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        languageCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @param value The languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCode(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      languageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return This builder for chaining.
     */
    public Builder clearLanguageCode() {
      languageCode_ = getDefaultInstance().getLanguageCode();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @param value The bytes for languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCodeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      languageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability)
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Singleton default instance (all fields default/unset) and the Parser used
  // by every parseFrom overload above.
  private static final com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability();
  }
  public static com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<CriterionCategoryLocaleAvailability>
      PARSER = new com.google.protobuf.AbstractParser<CriterionCategoryLocaleAvailability>() {
    @java.lang.Override
    public CriterionCategoryLocaleAvailability parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach whatever was parsed so far so callers can inspect partial data.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CriterionCategoryLocaleAvailability> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CriterionCategoryLocaleAvailability> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.common.CriterionCategoryLocaleAvailability getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- File boundary: google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/CriterionCategoryLocaleAvailability.java (repo: googleads/google-ads-java) ----
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/common/criterion_category_availability.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.common;
/**
* <pre>
* Information about which locales a category is available in.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability}
*/
public final class CriterionCategoryLocaleAvailability extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability)
CriterionCategoryLocaleAvailabilityOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE(review): generated protobuf code (v21 stub) — do not hand-edit; regenerate from the .proto.
  // Use CriterionCategoryLocaleAvailability.newBuilder() to construct.
  private CriterionCategoryLocaleAvailability(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes all fields to their proto3 defaults.
  private CriterionCategoryLocaleAvailability() {
    availabilityMode_ = 0;
    countryCode_ = "";
    languageCode_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new CriterionCategoryLocaleAvailability();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v21_common_CriterionCategoryLocaleAvailability_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v21_common_CriterionCategoryLocaleAvailability_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.class, com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.Builder.class);
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Message-side presence bits: 0x1 = country_code, 0x2 = language_code
  // (availability_mode is a plain proto3 enum with no hasbit).
  private int bitField0_;
  public static final int AVAILABILITY_MODE_FIELD_NUMBER = 1;
  private int availabilityMode_ = 0;
  /**
   * <pre>
   * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
   * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
   * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
   * be set).
   * </pre>
   *
   * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
   * @return The enum numeric value on the wire for availabilityMode.
   */
  @java.lang.Override public int getAvailabilityModeValue() {
    return availabilityMode_;
  }
  /**
   * <pre>
   * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
   * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
   * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
   * be set).
   * </pre>
   *
   * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
   * @return The availabilityMode.
   */
  @java.lang.Override public com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode getAvailabilityMode() {
    com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode result = com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.forNumber(availabilityMode_);
    return result == null ? com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNRECOGNIZED : result;
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Field 4: country_code. Presence tracked via bit 0x00000001 of bitField0_.
  public static final int COUNTRY_CODE_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object countryCode_ = "";
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return Whether the countryCode field is set.
   */
  @java.lang.Override
  public boolean hasCountryCode() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return The countryCode.
   */
  @java.lang.Override
  public java.lang.String getCountryCode() {
    java.lang.Object ref = countryCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Cached as ByteString (e.g. after parsing); decode once and memoize.
      com.google.protobuf.ByteString bs = 
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      countryCode_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * The ISO-3166-1 alpha-2 country code associated with the category.
   * </pre>
   *
   * <code>optional string country_code = 4;</code>
   * @return The bytes for countryCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getCountryCodeBytes() {
    java.lang.Object ref = countryCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b = 
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      countryCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Field 5: language_code. Presence tracked via bit 0x00000002 of bitField0_.
  public static final int LANGUAGE_CODE_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object languageCode_ = "";
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return Whether the languageCode field is set.
   */
  @java.lang.Override
  public boolean hasLanguageCode() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return The languageCode.
   */
  @java.lang.Override
  public java.lang.String getLanguageCode() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Cached as ByteString (e.g. after parsing); decode once and memoize.
      com.google.protobuf.ByteString bs = 
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      languageCode_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * ISO 639-1 code of the language associated with the category.
   * </pre>
   *
   * <code>optional string language_code = 5;</code>
   * @return The bytes for languageCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getLanguageCodeBytes() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b = 
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      languageCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // -1 = not computed yet, 1 = initialized; no required fields, so always true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: enum field 1 (only when
  // non-default), optional strings 4 and 5 (only when their presence bits are
  // set), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (availabilityMode_ != com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNSPECIFIED.getNumber()) {
      output.writeEnum(1, availabilityMode_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, countryCode_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, languageCode_);
    }
    getUnknownFields().writeTo(output);
  }
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the .proto.
  // Computes (and memoizes in memoizedSize) the wire size; must mirror the
  // field conditions in writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (availabilityMode_ != com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, availabilityMode_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, countryCode_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, languageCode_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability other = (com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability) obj;
    // Optional string fields are compared presence-first: two messages are
    // unequal if one has the field set and the other does not, even when
    // both values would read back as "".
    if (availabilityMode_ != other.availabilityMode_) return false;
    if (hasCountryCode() != other.hasCountryCode()) return false;
    if (hasCountryCode()) {
      if (!getCountryCode()
          .equals(other.getCountryCode())) return false;
    }
    if (hasLanguageCode() != other.hasLanguageCode()) return false;
    if (hasLanguageCode()) {
      if (!getLanguageCode()
          .equals(other.getLanguageCode())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Hash is seeded with the descriptor so different message types with
    // identical field values hash differently. Absent optional fields
    // contribute nothing, keeping hashCode consistent with equals().
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + AVAILABILITY_MODE_FIELD_NUMBER;
    hash = (53 * hash) + availabilityMode_;
    if (hasCountryCode()) {
      hash = (37 * hash) + COUNTRY_CODE_FIELD_NUMBER;
      hash = (53 * hash) + getCountryCode().hashCode();
    }
    if (hasLanguageCode()) {
      hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
      hash = (53 * hash) + getLanguageCode().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points; all delegate to the shared PARSER.
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants wrap IOExceptions via the GeneratedMessageV3 helpers.
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body.
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder(prototype) seeds the builder with a copy
  // of the prototype's fields; toBuilder() avoids a merge for the default
  // instance since there is nothing to copy.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Information about which locales a category is available in.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability)
      com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailabilityOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v21_common_CriterionCategoryLocaleAvailability_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v21_common_CriterionCategoryLocaleAvailability_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.class, com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.Builder.class);
    }
    // Construct using com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and field values to their defaults.
      bitField0_ = 0;
      availabilityMode_ = 0;
      countryCode_ = "";
      languageCode_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.common.CriterionCategoryAvailabilityProto.internal_static_google_ads_googleads_v21_common_CriterionCategoryLocaleAvailability_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability build() {
      com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability buildPartial() {
      com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability result = new com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability result) {
      int from_bitField0_ = bitField0_;
      // Bit mapping: builder bit 0x1 = availability_mode (no message hasbit,
      // it is a plain proto3 enum); builder bit 0x2 -> message bit 0x1
      // (country_code); builder bit 0x4 -> message bit 0x2 (language_code).
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.availabilityMode_ = availabilityMode_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.countryCode_ = countryCode_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.languageCode_ = languageCode_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability) {
        return mergeFrom((com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability other) {
      if (other == com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability.getDefaultInstance()) return this;
      // Standard protobuf merge semantics: only fields that are set (or
      // non-default, for the enum) on 'other' overwrite this builder.
      if (other.availabilityMode_ != 0) {
        setAvailabilityModeValue(other.getAvailabilityModeValue());
      }
      if (other.hasCountryCode()) {
        countryCode_ = other.countryCode_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLanguageCode()) {
        languageCode_ = other.languageCode_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              // Tag 8 = field 1 (availability_mode), wire type 0 (varint).
              availabilityMode_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 34: {
              // Tag 34 = field 4 (country_code), wire type 2
              // (length-delimited); parse enforces valid UTF-8.
              countryCode_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              // Tag 42 = field 5 (language_code), wire type 2.
              languageCode_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private int availabilityMode_ = 0;
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return The enum numeric value on the wire for availabilityMode.
     */
    @java.lang.Override public int getAvailabilityModeValue() {
      return availabilityMode_;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @param value The enum numeric value on the wire for availabilityMode to set.
     * @return This builder for chaining.
     */
    public Builder setAvailabilityModeValue(int value) {
      availabilityMode_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return The availabilityMode.
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode getAvailabilityMode() {
      // Unknown wire values (e.g. from a newer server) map to UNRECOGNIZED.
      com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode result = com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.forNumber(availabilityMode_);
      return result == null ? com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @param value The availabilityMode to set.
     * @return This builder for chaining.
     */
    public Builder setAvailabilityMode(com.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      availabilityMode_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and
     * language will be empty), COUNTRY (only country will be set), LANGUAGE (only
     * language wil be set), COUNTRY_AND_LANGUAGE (both country and language will
     * be set).
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.CriterionCategoryLocaleAvailabilityModeEnum.CriterionCategoryLocaleAvailabilityMode availability_mode = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearAvailabilityMode() {
      bitField0_ = (bitField0_ & ~0x00000001);
      availabilityMode_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object countryCode_ = "";
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return Whether the countryCode field is set.
     */
    public boolean hasCountryCode() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return The countryCode.
     */
    public java.lang.String getCountryCode() {
      java.lang.Object ref = countryCode_;
      if (!(ref instanceof java.lang.String)) {
        // Decode lazily-parsed bytes once and cache the String.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        countryCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return The bytes for countryCode.
     */
    public com.google.protobuf.ByteString
        getCountryCodeBytes() {
      java.lang.Object ref = countryCode_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding for subsequent byte accesses.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        countryCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @param value The countryCode to set.
     * @return This builder for chaining.
     */
    public Builder setCountryCode(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      countryCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearCountryCode() {
      countryCode_ = getDefaultInstance().getCountryCode();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The ISO-3166-1 alpha-2 country code associated with the category.
     * </pre>
     *
     * <code>optional string country_code = 4;</code>
     * @param value The bytes for countryCode to set.
     * @return This builder for chaining.
     */
    public Builder setCountryCodeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      countryCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object languageCode_ = "";
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return Whether the languageCode field is set.
     */
    public boolean hasLanguageCode() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return The languageCode.
     */
    public java.lang.String getLanguageCode() {
      java.lang.Object ref = languageCode_;
      if (!(ref instanceof java.lang.String)) {
        // Decode lazily-parsed bytes once and cache the String.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        languageCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return The bytes for languageCode.
     */
    public com.google.protobuf.ByteString
        getLanguageCodeBytes() {
      java.lang.Object ref = languageCode_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding for subsequent byte accesses.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        languageCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @param value The languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCode(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      languageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @return This builder for chaining.
     */
    public Builder clearLanguageCode() {
      languageCode_ = getDefaultInstance().getLanguageCode();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * ISO 639-1 code of the language associated with the category.
     * </pre>
     *
     * <code>optional string language_code = 5;</code>
     * @param value The bytes for languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCodeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      languageCode_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability)
  private static final com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability();
  }
  public static com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless shared parser. On failure it attaches the partially-built
  // message via setUnfinishedMessage() so callers can inspect what parsed.
  private static final com.google.protobuf.Parser<CriterionCategoryLocaleAvailability>
      PARSER = new com.google.protobuf.AbstractParser<CriterionCategoryLocaleAvailability>() {
    @java.lang.Override
    public CriterionCategoryLocaleAvailability parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<CriterionCategoryLocaleAvailability> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CriterionCategoryLocaleAvailability> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.common.CriterionCategoryLocaleAvailability getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,294 | java-iam-admin/proto-google-iam-admin-v1/src/main/java/com/google/iam/admin/v1/SignJwtRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/admin/v1/iam.proto
// Protobuf Java Version: 3.25.8
package com.google.iam.admin.v1;
/**
*
*
* <pre>
* Deprecated. [Migrate to Service Account Credentials
* API](https://cloud.google.com/iam/help/credentials/migrate-api).
*
* The service account sign JWT request.
* </pre>
*
* Protobuf type {@code google.iam.admin.v1.SignJwtRequest}
*/
public final class SignJwtRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.iam.admin.v1.SignJwtRequest)
SignJwtRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SignJwtRequest.newBuilder() to construct.
  private SignJwtRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: both string fields start empty.
  private SignJwtRequest() {
    name_ = "";
    payload_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SignJwtRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.iam.admin.v1.Iam
        .internal_static_google_iam_admin_v1_SignJwtRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.iam.admin.v1.Iam
        .internal_static_google_iam_admin_v1_SignJwtRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.iam.admin.v1.SignJwtRequest.class,
            com.google.iam.admin.v1.SignJwtRequest.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. Deprecated. [Migrate to Service Account Credentials
   * API](https://cloud.google.com/iam/help/credentials/migrate-api).
   *
   * The resource name of the service account in the following format:
   * `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`.
   * Using `-` as a wildcard for the `PROJECT_ID` will infer the project from
   * the account. The `ACCOUNT` value can be the `email` address or the
   * `unique_id` of the service account.
   * </pre>
   *
   * <code>
   * string name = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @return The name.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 bytes once and cache
      // the resulting String back into name_.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Deprecated. [Migrate to Service Account Credentials
   * API](https://cloud.google.com/iam/help/credentials/migrate-api).
   *
   * The resource name of the service account in the following format:
   * `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`.
   * Using `-` as a wildcard for the `PROJECT_ID` will infer the project from
   * the account. The `ACCOUNT` value can be the `email` address or the
   * `unique_id` of the service account.
   * </pre>
   *
   * <code>
   * string name = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @return The bytes for name.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoding so repeated byte accesses do not re-encode.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAYLOAD_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object payload_ = "";
  /**
   *
   *
   * <pre>
   * Required. Deprecated. [Migrate to Service Account Credentials
   * API](https://cloud.google.com/iam/help/credentials/migrate-api).
   *
   * The JWT payload to sign. Must be a serialized JSON object that contains a
   * JWT Claims Set. For example: `{"sub": "user@example.com", "iat": 313435}`
   *
   * If the JWT Claims Set contains an expiration time (`exp`) claim, it must be
   * an integer timestamp that is not in the past and no more than 12 hours in
   * the future.
   *
   * If the JWT Claims Set does not contain an expiration time (`exp`) claim,
   * this claim is added automatically, with a timestamp that is 1 hour in the
   * future.
   * </pre>
   *
   * <code>string payload = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code>
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @return The payload.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public java.lang.String getPayload() {
    java.lang.Object ref = payload_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 bytes once and cache
      // the resulting String back into payload_.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      payload_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Deprecated. [Migrate to Service Account Credentials
   * API](https://cloud.google.com/iam/help/credentials/migrate-api).
   *
   * The JWT payload to sign. Must be a serialized JSON object that contains a
   * JWT Claims Set. For example: `{"sub": "user@example.com", "iat": 313435}`
   *
   * If the JWT Claims Set contains an expiration time (`exp`) claim, it must be
   * an integer timestamp that is not in the past and no more than 12 hours in
   * the future.
   *
   * If the JWT Claims Set does not contain an expiration time (`exp`) claim,
   * this claim is added automatically, with a timestamp that is 1 hour in the
   * future.
   * </pre>
   *
   * <code>string payload = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code>
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @return The bytes for payload.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getPayloadBytes() {
    java.lang.Object ref = payload_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoding so repeated byte accesses do not re-encode.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      payload_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(payload_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, payload_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(payload_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, payload_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.iam.admin.v1.SignJwtRequest)) {
return super.equals(obj);
}
com.google.iam.admin.v1.SignJwtRequest other = (com.google.iam.admin.v1.SignJwtRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getPayload().equals(other.getPayload())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
/**
 * Memoized hash consistent with {@link #equals}: mixes the descriptor, each
 * populated field (tag number then value), and the unknown fields.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;
  hash = (53 * hash) + getPayload().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points. Byte-oriented sources delegate
// straight to PARSER; stream-oriented sources go through GeneratedMessageV3's
// helpers, which translate protobuf parse failures into the declared
// exception types.
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.iam.admin.v1.SignJwtRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.iam.admin.v1.SignJwtRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.iam.admin.v1.SignJwtRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() avoids an extra mergeFrom when called
// on the shared default instance, since there is nothing to copy.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.iam.admin.v1.SignJwtRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for {@code google.iam.admin.v1.SignJwtRequest}.
 *
 * <pre>
 * Deprecated. [Migrate to Service Account Credentials
 * API](https://cloud.google.com/iam/help/credentials/migrate-api).
 *
 * The service account sign JWT request.
 * </pre>
 *
 * Protobuf type {@code google.iam.admin.v1.SignJwtRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.iam.admin.v1.SignJwtRequest)
    com.google.iam.admin.v1.SignJwtRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.iam.admin.v1.Iam
        .internal_static_google_iam_admin_v1_SignJwtRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.iam.admin.v1.Iam
        .internal_static_google_iam_admin_v1_SignJwtRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.iam.admin.v1.SignJwtRequest.class,
            com.google.iam.admin.v1.SignJwtRequest.Builder.class);
  }

  // Construct using com.google.iam.admin.v1.SignJwtRequest.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  /** Resets both fields to their proto3 defaults and clears the has-bits. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    name_ = "";
    payload_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.iam.admin.v1.Iam
        .internal_static_google_iam_admin_v1_SignJwtRequest_descriptor;
  }

  @java.lang.Override
  public com.google.iam.admin.v1.SignJwtRequest getDefaultInstanceForType() {
    return com.google.iam.admin.v1.SignJwtRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.iam.admin.v1.SignJwtRequest build() {
    com.google.iam.admin.v1.SignJwtRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.iam.admin.v1.SignJwtRequest buildPartial() {
    com.google.iam.admin.v1.SignJwtRequest result =
        new com.google.iam.admin.v1.SignJwtRequest(this);
    // Only copy fields that were explicitly set (tracked in bitField0_).
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  private void buildPartial0(com.google.iam.admin.v1.SignJwtRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.name_ = name_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.payload_ = payload_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.iam.admin.v1.SignJwtRequest) {
      return mergeFrom((com.google.iam.admin.v1.SignJwtRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges non-empty fields of {@code other} into this builder (proto3 merge semantics). */
  public Builder mergeFrom(com.google.iam.admin.v1.SignJwtRequest other) {
    if (other == com.google.iam.admin.v1.SignJwtRequest.getDefaultInstance()) return this;
    if (!other.getName().isEmpty()) {
      name_ = other.name_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (!other.getPayload().isEmpty()) {
      payload_ = other.payload_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  /** Streaming parse loop: dispatches on wire tags 10 (name) and 18 (payload). */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              name_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              payload_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Bit 0x1 tracks `name`, bit 0x2 tracks `payload`.
  private int bitField0_;

  // Holds either a String or a ByteString; see the lazy conversion in the accessors.
  private java.lang.Object name_ = "";
  /**
   * Required. The resource name of the service account, in the format
   * `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. Using `-` as the
   * `PROJECT_ID` wildcard infers the project from the account; `ACCOUNT`
   * may be the `email` address or the `unique_id` of the service account.
   *
   * <code>
   * string name = 1 [deprecated = true, (.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @return The name.
   */
  @java.lang.Deprecated
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * Required. See {@link #getName()} for field semantics.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @return The bytes for name.
   */
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * Required. See {@link #getName()} for field semantics.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @param value The name to set.
   * @return This builder for chaining.
   */
  @java.lang.Deprecated
  public Builder setName(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   * Resets {@code name} to its default. See {@link #getName()} for field semantics.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @return This builder for chaining.
   */
  @java.lang.Deprecated
  public Builder clearName() {
    name_ = getDefaultInstance().getName();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   * Sets {@code name} from UTF-8 bytes (validated). See {@link #getName()}.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.name is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1023
   * @param value The bytes for name to set.
   * @return This builder for chaining.
   */
  @java.lang.Deprecated
  public Builder setNameBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Holds either a String or a ByteString; see the lazy conversion in the accessors.
  private java.lang.Object payload_ = "";
  /**
   * Required. The JWT payload to sign: a serialized JSON object containing a
   * JWT Claims Set, e.g. `{"sub": "user@example.com", "iat": 313435}`. If the
   * Claims Set has an `exp` claim it must be an integer timestamp that is not
   * in the past and no more than 12 hours in the future; if absent, one is
   * added automatically, 1 hour in the future.
   *
   * <code>string payload = 2 [deprecated = true, (.google.api.field_behavior) = REQUIRED];</code>
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @return The payload.
   */
  @java.lang.Deprecated
  public java.lang.String getPayload() {
    java.lang.Object ref = payload_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      payload_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * Required. See {@link #getPayload()} for field semantics.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @return The bytes for payload.
   */
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getPayloadBytes() {
    java.lang.Object ref = payload_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      payload_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * Required. See {@link #getPayload()} for field semantics.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @param value The payload to set.
   * @return This builder for chaining.
   */
  @java.lang.Deprecated
  public Builder setPayload(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    payload_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * Resets {@code payload} to its default. See {@link #getPayload()}.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @return This builder for chaining.
   */
  @java.lang.Deprecated
  public Builder clearPayload() {
    payload_ = getDefaultInstance().getPayload();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }
  /**
   * Sets {@code payload} from UTF-8 bytes (validated). See {@link #getPayload()}.
   *
   * @deprecated google.iam.admin.v1.SignJwtRequest.payload is deprecated. See
   *     google/iam/admin/v1/iam.proto;l=1044
   * @param value The bytes for payload to set.
   * @return This builder for chaining.
   */
  @java.lang.Deprecated
  public Builder setPayloadBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    payload_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.iam.admin.v1.SignJwtRequest)
}
// @@protoc_insertion_point(class_scope:google.iam.admin.v1.SignJwtRequest)
// Shared immutable default instance (all fields at proto3 defaults).
private static final com.google.iam.admin.v1.SignJwtRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.iam.admin.v1.SignJwtRequest();
}
public static com.google.iam.admin.v1.SignJwtRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser implementation: parses via a fresh Builder; on failure the partial
// message is attached to the exception for diagnostics.
private static final com.google.protobuf.Parser<SignJwtRequest> PARSER =
    new com.google.protobuf.AbstractParser<SignJwtRequest>() {
      @java.lang.Override
      public SignJwtRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<SignJwtRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SignJwtRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.iam.admin.v1.SignJwtRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 34,761 | compiler/src/jdk.graal.compiler.test/src/jdk/graal/compiler/hotspot/test/HotSpotCryptoSubstitutionTest.java | /*
* Copyright (c) 2013, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.hotspot.test;
import static jdk.graal.compiler.hotspot.HotSpotBackend.SHA2_IMPL_COMPRESS_MB;
import static jdk.graal.compiler.hotspot.HotSpotBackend.SHA3_IMPL_COMPRESS_MB;
import static jdk.graal.compiler.hotspot.HotSpotBackend.SHA5_IMPL_COMPRESS_MB;
import static jdk.graal.compiler.hotspot.HotSpotBackend.SHA_IMPL_COMPRESS_MB;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.AlgorithmParameters;
import java.security.GeneralSecurityException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SecureRandom;
import java.security.Signature;
import java.util.Arrays;
import java.util.Random;
import javax.crypto.Cipher;
import javax.crypto.KEM;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import jdk.graal.compiler.core.test.GraalCompilerTest;
import jdk.graal.compiler.hotspot.meta.HotSpotForeignCallDescriptor;
import jdk.graal.compiler.nodes.StructuredGraph;
import jdk.graal.compiler.nodes.extended.ForeignCallNode;
import jdk.graal.compiler.replacements.SnippetSubstitutionNode;
import jdk.graal.compiler.replacements.nodes.AESNode;
import jdk.graal.compiler.replacements.nodes.CipherBlockChainingAESNode;
import jdk.graal.compiler.replacements.nodes.CounterModeAESNode;
import jdk.graal.compiler.replacements.nodes.MessageDigestNode.SHA1Node;
import jdk.graal.compiler.replacements.nodes.MessageDigestNode.SHA256Node;
import jdk.graal.compiler.replacements.nodes.MessageDigestNode.SHA3Node;
import jdk.graal.compiler.replacements.nodes.MessageDigestNode.SHA512Node;
import jdk.vm.ci.code.BailoutException;
import jdk.vm.ci.code.InstalledCode;
import jdk.vm.ci.meta.ResolvedJavaMethod;
/**
* Tests the intrinsification of certain crypto methods.
*/
public class HotSpotCryptoSubstitutionTest extends HotSpotGraalCompilerTest {
// Plaintext fed to every encrypt/decrypt round trip.
private final byte[] input;

public HotSpotCryptoSubstitutionTest() throws IOException {
    // Uses this test's own class file as input data; readClassfile16 is not
    // visible in this chunk — presumably it sizes the data to a multiple of
    // 16 bytes for the block ciphers (TODO confirm against its definition).
    input = readClassfile16(getClass());
}
/**
 * Resolves {@code className#methodName} (method assumed unique by name) to a
 * {@link ResolvedJavaMethod} via the JVMCI meta-access provider.
 */
private ResolvedJavaMethod getResolvedJavaMethod(String className, String methodName) throws ClassNotFoundException {
    Class<?> klass = Class.forName(className);
    return getMetaAccess().lookupJavaMethod(getMethod(klass, methodName));
}
/**
 * Overload that disambiguates overloaded target methods by their exact
 * parameter types.
 */
private ResolvedJavaMethod getResolvedJavaMethod(String className, String methodName, Class<?>... parameterTypes) throws ClassNotFoundException {
    Class<?> klass = Class.forName(className);
    return getMetaAccess().lookupJavaMethod(getMethod(klass, methodName, parameterTypes));
}
/**
 * Convenience wrapper: resolves the intrinsified method by name, then runs
 * the encrypt/decrypt comparison.
 */
private void testEncryptDecrypt(String className, String methodName, String generatorAlgorithm, int keySize, String algorithm) throws GeneralSecurityException, ClassNotFoundException {
    testEncryptDecrypt(getResolvedJavaMethod(className, methodName), generatorAlgorithm, keySize, algorithm);
}
/**
 * Runs an encrypt/decrypt round trip before and after installing the Graal
 * substitution for {@code intrinsicMethod} and asserts both runs produce the
 * same result.
 *
 * @param intrinsicMethod the JDK crypto method whose substitution is compiled and installed
 * @param generatorAlgorithm key-generator algorithm name (e.g. "AES")
 * @param keySize key size in bits
 * @param algorithm cipher transformation (e.g. "AES/CBC/NoPadding")
 */
private void testEncryptDecrypt(ResolvedJavaMethod intrinsicMethod, String generatorAlgorithm, int keySize, String algorithm) throws GeneralSecurityException {
    KeyGenerator gen = KeyGenerator.getInstance(generatorAlgorithm);
    gen.init(keySize);
    SecretKey key = gen.generateKey();
    // Baseline result computed before the intrinsic is installed.
    Result expected = runEncryptDecrypt(key, algorithm);
    InstalledCode intrinsic = compileAndInstallSubstitution(intrinsicMethod);
    Assert.assertNotNull("missing intrinsic", intrinsic);
    try {
        Result actual = runEncryptDecrypt(key, algorithm);
        assertEquals(expected, actual);
    } finally {
        // Always invalidate, even when the comparison fails, so the installed
        // substitution cannot leak into subsequent tests.
        intrinsic.invalidate();
    }
}
@Test
public void testAESEncryptBlock() throws Exception {
    // Covers all three AES key sizes, each with and without PKCS5 padding.
    Assume.assumeTrue("AESNode not supported", AESNode.isSupported(getArchitecture()));
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implEncryptBlock", "AES", 128, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implEncryptBlock", "AES", 192, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implEncryptBlock", "AES", 256, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implEncryptBlock", "AES", 128, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implEncryptBlock", "AES", 192, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implEncryptBlock", "AES", 256, "AES/CBC/PKCS5Padding");
}
@Test
public void testAESDecryptBlock() throws Exception {
    // Decrypt-side counterpart of testAESEncryptBlock: same key-size/padding matrix.
    Assume.assumeTrue("AESNode not supported", AESNode.isSupported(getArchitecture()));
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implDecryptBlock", "AES", 128, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implDecryptBlock", "AES", 192, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implDecryptBlock", "AES", 256, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implDecryptBlock", "AES", 128, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implDecryptBlock", "AES", 192, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.AESCrypt", "implDecryptBlock", "AES", 256, "AES/CBC/PKCS5Padding");
}
@Test
public void testCipherBlockChainingEncrypt() throws Exception {
    // AES key-size/padding matrix plus DESede cases; the DESede runs exercise
    // the non-intrinsified path of CipherBlockChaining.implEncrypt.
    Assume.assumeTrue("CipherBlockChainingAESNode not supported", CipherBlockChainingAESNode.isSupported(getArchitecture()));
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "AES", 128, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "AES", 192, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "AES", 256, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "AES", 128, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "AES", 192, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "AES", 256, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "DESede", 168, "DESede/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implEncrypt", "DESede", 168, "DESede/CBC/PKCS5Padding");
}
@Test
public void testCipherBlockChainingDecrypt() throws Exception {
    // Decrypt-side counterpart of testCipherBlockChainingEncrypt.
    Assume.assumeTrue("CipherBlockChainingAESNode not supported", CipherBlockChainingAESNode.isSupported(getArchitecture()));
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "AES", 128, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "AES", 192, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "AES", 256, "AES/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "AES", 128, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "AES", 192, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "AES", 256, "AES/CBC/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "DESede", 168, "DESede/CBC/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CipherBlockChaining", "implDecrypt", "DESede", 168, "DESede/CBC/PKCS5Padding");
}
@Test
public void testCounterModeEncrypt() throws Exception {
    // Same key-size/padding matrix as the other cipher tests: 128/192/256-bit
    // AES with and without padding, plus DESede for the non-intrinsified path.
    Assume.assumeTrue("CounterModeAESNode not supported", CounterModeAESNode.isSupported(getArchitecture()));
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "AES", 128, "AES/CTR/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "AES", 192, "AES/CTR/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "AES", 256, "AES/CTR/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "AES", 128, "AES/CTR/PKCS5Padding");
    // Fixed copy-paste duplication: the 192/256-bit cases previously repeated
    // "NoPadding", leaving padded CTR untested beyond 128-bit keys.
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "AES", 192, "AES/CTR/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "AES", 256, "AES/CTR/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "DESede", 168, "DESede/CTR/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.CounterMode", "implCrypt", "DESede", 168, "DESede/CTR/PKCS5Padding");
}
@Test
public void testEletronicCodeBookEncrypt() throws Exception {
    // NOTE(review): method name carries an upstream typo ("Eletronic");
    // kept to preserve the public test name.
    Assume.assumeTrue("ElectronicCodeBook encrypt not supported", runtime().getVMConfig().electronicCodeBookEncrypt != 0L);
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "AES", 128, "AES/ECB/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "AES", 192, "AES/ECB/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "AES", 256, "AES/ECB/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "AES", 128, "AES/ECB/PKCS5Padding");
    // Fixed copy-paste duplication: the 192/256-bit cases previously repeated
    // "NoPadding", leaving padded ECB untested beyond 128-bit keys.
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "AES", 192, "AES/ECB/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "AES", 256, "AES/ECB/PKCS5Padding");
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "DESede", 168, "DESede/ECB/NoPadding");
    testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBEncrypt", "DESede", 168, "DESede/ECB/PKCS5Padding");
}
@Test
public void testEletronicCodeBookDecrypt() throws Exception {
Assume.assumeTrue("ElectronicCodeBook decrypt not supported", runtime().getVMConfig().electronicCodeBookDecrypt != 0L);
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "AES", 128, "AES/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "AES", 192, "AES/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "AES", 256, "AES/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "AES", 128, "AES/ECB/PKCS5Padding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "AES", 192, "AES/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "AES", 256, "AES/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "DESede", 168, "DESede/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ElectronicCodeBook", "implECBDecrypt", "DESede", 168, "DESede/ECB/PKCS5Padding");
}
    @Test
    public void testGaloisCounterModeCrypt() throws Exception {
        // Skip unless the VM exposes the GCM crypt stub.
        Assume.assumeTrue("GaloisCounterMode not supported", runtime().getVMConfig().galoisCounterModeCrypt != 0L);
        testEncryptDecrypt("com.sun.crypto.provider.GaloisCounterMode", "implGCMCrypt0", "AES", 128, "AES/GCM/NoPadding");
        // NOTE(review): GCM normally accepts NoPadding only, and DESede/GCM is not a standard
        // combination; these rows presumably exercise the NoSuchAlgorithmException path that
        // runEncryptDecrypt() converts into a Result — confirm against testEncryptDecrypt.
        testEncryptDecrypt("com.sun.crypto.provider.GaloisCounterMode", "implGCMCrypt0", "AES", 128, "AES/GCM/PKCS5Padding");
        testEncryptDecrypt("com.sun.crypto.provider.GaloisCounterMode", "implGCMCrypt0", "DESede", 168, "DESede/GCM/NoPadding");
        testEncryptDecrypt("com.sun.crypto.provider.GaloisCounterMode", "implGCMCrypt0", "DESede", 168, "DESede/GCM/PKCS5Padding");
    }
@Test
public void testPoly1305() throws Exception {
Assume.assumeTrue("Poly1305 not supported", runtime().getVMConfig().poly1305ProcessBlocks != 0L);
testEncryptDecrypt(getResolvedJavaMethod("com.sun.crypto.provider.Poly1305", "processMultipleBlocks", byte[].class, int.class, int.class, long[].class, long[].class),
"ChaCha20", 256, "ChaCha20-Poly1305/None/NoPadding");
testEncryptDecrypt(getResolvedJavaMethod("com.sun.crypto.provider.Poly1305", "processMultipleBlocks", byte[].class, int.class, int.class, long[].class, long[].class),
"ChaCha20", 256, "ChaCha20-Poly1305/ECB/NoPadding");
testEncryptDecrypt(getResolvedJavaMethod("com.sun.crypto.provider.Poly1305", "processMultipleBlocks", byte[].class, int.class, int.class, long[].class, long[].class),
"ChaCha20", 256, "ChaCha20-Poly1305/None/PKCS5Padding");
testEncryptDecrypt(getResolvedJavaMethod("com.sun.crypto.provider.Poly1305", "processMultipleBlocks", byte[].class, int.class, int.class, long[].class, long[].class),
"ChaCha20", 256, "ChaCha20-Poly1305/ECB/PKCS5Padding");
}
@Test
public void testChaCha20() throws Exception {
Assume.assumeTrue("ChaCha20 not support", runtime().getVMConfig().chacha20Block != 0L);
testEncryptDecrypt("com.sun.crypto.provider.ChaCha20Cipher", "implChaCha20Block", "ChaCha20", 256, "ChaCha20-Poly1305/None/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ChaCha20Cipher", "implChaCha20Block", "ChaCha20", 256, "ChaCha20-Poly1305/ECB/NoPadding");
testEncryptDecrypt("com.sun.crypto.provider.ChaCha20Cipher", "implChaCha20Block", "ChaCha20", 256, "ChaCha20-Poly1305/None/PKCS5Padding");
testEncryptDecrypt("com.sun.crypto.provider.ChaCha20Cipher", "implChaCha20Block", "ChaCha20", 256, "ChaCha20-Poly1305/ECB/PKCS5Padding");
}
    /** Parameters captured by the most recent {@link #encrypt} call, consumed by {@link #decrypt}. */
    AlgorithmParameters algorithmParameters;

    /**
     * Encrypts {@code indata} with a fresh cipher for the given transformation, recording the
     * provider-generated parameters (e.g. the IV, when the mode uses one) in
     * {@link #algorithmParameters} so a later {@link #decrypt} call can reuse them.
     */
    private byte[] encrypt(byte[] indata, SecretKey key, String algorithm) throws GeneralSecurityException {
        byte[] result = indata;
        Cipher c = Cipher.getInstance(algorithm);
        c.init(Cipher.ENCRYPT_MODE, key);
        // Saved as instance state: encrypt() and decrypt() are implicitly paired.
        algorithmParameters = c.getParameters();
        // update() then doFinal(); the two output chunks are concatenated below.
        byte[] r1 = c.update(result);
        byte[] r2 = c.doFinal();
        result = new byte[r1.length + r2.length];
        System.arraycopy(r1, 0, result, 0, r1.length);
        System.arraycopy(r2, 0, result, r1.length, r2.length);
        return result;
    }
    /**
     * Decrypts {@code indata} with the given key, reusing the {@link #algorithmParameters}
     * captured by the preceding {@link #encrypt} call. Must be called after {@code encrypt}
     * for the same transformation, otherwise the parameters will not match.
     */
    private byte[] decrypt(byte[] indata, SecretKey key, String algorithm) throws GeneralSecurityException {
        byte[] result = indata;
        Cipher c = Cipher.getInstance(algorithm);
        c.init(Cipher.DECRYPT_MODE, key, algorithmParameters);
        // update() then doFinal(); the two output chunks are concatenated below.
        byte[] r1 = c.update(result);
        byte[] r2 = c.doFinal();
        result = new byte[r1.length + r2.length];
        System.arraycopy(r1, 0, result, 0, r1.length);
        System.arraycopy(r2, 0, result, r1.length, r2.length);
        return result;
    }
private static byte[] readClassfile16(Class<? extends HotSpotCryptoSubstitutionTest> c) throws IOException {
String classFilePath = "/" + c.getName().replace('.', '/') + ".class";
InputStream stream = c.getResourceAsStream(classFilePath);
int bytesToRead = stream.available();
bytesToRead -= bytesToRead % 16;
byte[] classFile = new byte[bytesToRead];
new DataInputStream(stream).readFully(classFile);
return classFile;
}
    /**
     * Encrypts then decrypts {@link #input} with the given key and transformation, asserting
     * that the round trip restores the plaintext. An unsupported transformation is returned
     * as a {@link Result} carrying the {@link NoSuchAlgorithmException} instead of thrown,
     * presumably so interpreted and compiled runs can be compared on that path too.
     */
    public Result runEncryptDecrypt(SecretKey key, String algorithm) throws GeneralSecurityException {
        try {
            byte[] indata = input.clone();
            byte[] cipher = encrypt(indata, key, algorithm);
            byte[] plain = decrypt(cipher, key, algorithm);
            Assert.assertArrayEquals(indata, plain);
            return new Result(plain, null);
        } catch (NoSuchAlgorithmException e) {
            // Deliberately-invalid transformations (e.g. DESede/GCM) end up here.
            return new Result(null, e);
        }
    }
@Test
public void testDigestBaseSHA() throws Exception {
Assume.assumeTrue("SHA1 not supported", runtime().getVMConfig().sha1ImplCompressMultiBlock != 0L);
testDigestBase("sun.security.provider.DigestBase", "implCompressMultiBlock", "SHA-1", SHA_IMPL_COMPRESS_MB);
}
@Test
public void testDigestBaseSHA2() throws Exception {
Assume.assumeTrue("SHA256 not supported", runtime().getVMConfig().sha256ImplCompressMultiBlock != 0L);
testDigestBase("sun.security.provider.DigestBase", "implCompressMultiBlock", "SHA-256", SHA2_IMPL_COMPRESS_MB);
}
@Test
public void testDigestBaseSHA5() throws Exception {
Assume.assumeTrue("SHA512 not supported", runtime().getVMConfig().sha512ImplCompressMultiBlock != 0L);
testDigestBase("sun.security.provider.DigestBase", "implCompressMultiBlock", "SHA-512", SHA5_IMPL_COMPRESS_MB);
}
@Test
public void testDigestBaseSHA3() throws Exception {
Assume.assumeTrue("SHA3 not supported", runtime().getVMConfig().sha3ImplCompressMultiBlock != 0L);
testDigestBase("sun.security.provider.DigestBase", "implCompressMultiBlock", "SHA3-512", SHA3_IMPL_COMPRESS_MB);
}
    /** Disarms {@link #checkLowTierGraph} before each test so it only verifies when a test arms it. */
    @Before
    public void clearExceptionCall() {
        expectedCall = null;
    }
HotSpotForeignCallDescriptor expectedCall;
@Override
protected void checkLowTierGraph(StructuredGraph graph) {
if (expectedCall != null) {
for (ForeignCallNode node : graph.getNodes().filter(ForeignCallNode.class)) {
if (node.getDescriptor() == expectedCall) {
return;
}
}
assertTrue("expected call to " + expectedCall, false);
}
}
    /**
     * Compiles the named multi-block compress method and checks that the compiled digest of
     * {@link #input} matches the digest computed before installation. Setting
     * {@code expectedCall} arms {@link #checkLowTierGraph} to require the given foreign call.
     */
    private void testDigestBase(String className, String methodName, String algorithm, HotSpotForeignCallDescriptor call) throws Exception {
        Class<?> klass = Class.forName(className);
        expectedCall = call;
        MessageDigest digest = MessageDigest.getInstance(algorithm);
        // Reference digest, computed before the intrinsic is installed.
        byte[] expected = digest.digest(input.clone());
        ResolvedJavaMethod method = getResolvedJavaMethod(klass, methodName);
        try {
            testDigestBase(digest, expected, method);
        } catch (BailoutException e) {
            // The plugin may cause loading which invalidates assumptions in the graph so retry it
            // once. This normally only occurs when running individual tests.
            if (e.getMessage().contains("Code installation failed: dependencies failed")) {
                testDigestBase(digest, expected, method);
            } else {
                throw e;
            }
        }
    }
private void testDigestBase(MessageDigest digest, byte[] expected, ResolvedJavaMethod method) {
StructuredGraph graph = parseForCompile(method);
assertTrue(graph.getNodes().filter(SnippetSubstitutionNode.class).isNotEmpty());
InstalledCode intrinsic = getCode(method, graph, false, true, GraalCompilerTest.getInitialOptions());
try {
Assert.assertNotNull("missing intrinsic", intrinsic);
byte[] actual = digest.digest(input.clone());
assertDeepEquals(expected, actual);
} finally {
intrinsic.invalidate();
}
}
public byte[] testDigest(String name, byte[] data) throws NoSuchAlgorithmException, NoSuchProviderException {
MessageDigest digest;
digest = MessageDigest.getInstance(name, "SUN");
digest.update(data);
return digest.digest();
}
byte[] getData() {
byte[] data = new byte[1024 * 16];
for (int i = 0; i < data.length; i++) {
data[i] = (byte) i;
}
return data;
}
@Test
public void testSha1() {
Assume.assumeTrue("SHA1 not supported", SHA1Node.isSupported(getArchitecture()));
assertMessageDigestAlgorithmExists("SHA-1");
testWithInstalledIntrinsic("sun.security.provider.SHA", "implCompress0", "testDigest", "SHA-1", getData());
}
    /**
     * Fails (rather than skips) when the SUN provider does not know {@code algorithm}.
     * A missing SUN provider is treated as an environment error and rethrown unchecked.
     */
    void assertMessageDigestAlgorithmExists(String algorithm) {
        // Ensure the algorithm exists
        try {
            MessageDigest.getInstance(algorithm, "SUN");
        } catch (NoSuchAlgorithmException e) {
            // Unconditional failure with a descriptive message (GraalTest assertFalse overload).
            assertFalse(true, "unknown algorithm " + algorithm);
        } catch (NoSuchProviderException e) {
            throw new RuntimeException(e);
        }
    }
void testWithInstalledIntrinsic(String className, String methodName, String testSnippetName, Object... args) {
Class<?> c;
try {
c = Class.forName(className);
} catch (ClassNotFoundException e) {
// It's ok to not find the class - a different security provider
// may have been installed
Assume.assumeTrue(className + " is not available", false);
return;
}
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, methodName)), testSnippetName, args);
}
void testWithInstalledIntrinsic(ResolvedJavaMethod intrinsicMethod, String testSnippetName, Object... args) {
InstalledCode code = null;
try {
ResolvedJavaMethod method = getResolvedJavaMethod(testSnippetName);
Object receiver = method.isStatic() ? null : this;
GraalCompilerTest.Result expect = executeExpected(method, receiver, args);
code = compileAndInstallSubstitution(intrinsicMethod);
assertTrue("Failed to install " + intrinsicMethod.getName(), code != null);
testAgainstExpected(method, expect, receiver, args);
} catch (AssumptionViolatedException e) {
// Suppress so that subsequent calls to this method within the
// same Junit @Test annotated method can proceed.
}
if (code != null) {
code.invalidate();
}
}
@Test
public void testSha256() {
Assume.assumeTrue("SHA256 not supported", SHA256Node.isSupported(getArchitecture()));
assertMessageDigestAlgorithmExists("SHA-256");
testWithInstalledIntrinsic("sun.security.provider.SHA2", "implCompress0", "testDigest", "SHA-256", getData());
}
@Test
public void testSha512() {
Assume.assumeTrue("SHA512 not supported", SHA512Node.isSupported(getArchitecture()));
assertMessageDigestAlgorithmExists("SHA-512");
testWithInstalledIntrinsic("sun.security.provider.SHA5", "implCompress0", "testDigest", "SHA-512", getData());
}
@Test
public void testSha3() {
Assume.assumeTrue("SHA3 not supported", SHA3Node.isSupported(getArchitecture()));
assertMessageDigestAlgorithmExists("SHA3-512");
testWithInstalledIntrinsic("sun.security.provider.SHA3", "implCompress0", "testDigest", "SHA3-512", getData());
}
@Test
public void testMD5() {
assertMessageDigestAlgorithmExists("MD5");
testWithInstalledIntrinsic("sun.security.provider.MD5", "implCompress0", "testDigest", "MD5", getData());
}
static class SeededSecureRandom extends SecureRandom {
private static final long serialVersionUID = 1L;
private final Random rnd;
SeededSecureRandom() {
rnd = GraalCompilerTest.getRandomInstance();
}
@Override
public void nextBytes(byte[] bytes) {
rnd.nextBytes(bytes);
}
@Override
public byte[] generateSeed(int numBytes) {
var out = new byte[numBytes];
rnd.nextBytes(out);
return out;
}
}
KeyPair generateKeyPair(String algorithm) throws NoSuchAlgorithmException {
var g = KeyPairGenerator.getInstance(algorithm);
var size = switch (g.getAlgorithm()) {
case "RSA", "RSASSA-PSS", "DSA", "DiffieHellman" -> 1024;
case "EC" -> 256;
case "EdDSA", "Ed25519", "XDH", "X25519" -> 255;
case "Ed448", "X448" -> 448;
case "ML-KEM", "ML-KEM-768", "ML-KEM-512", "ML-KEM-1024",
"ML-DSA", "ML-DSA-44", "ML-DSA-65", "ML-DSA-87" ->
-1;
default -> throw new UnsupportedOperationException(algorithm);
};
g.initialize(size, new SeededSecureRandom());
return g.generateKeyPair();
}
    /**
     * Signs {@link #input} with a deterministic key pair, then verifies the signature.
     * Returns the signature bytes with the verification result (1/0) appended as a trailing
     * byte, so the verification outcome participates in the result comparison as well.
     */
    public byte[] testSignVer(String algorithm) throws GeneralSecurityException {
        var kp = generateKeyPair(algorithm);
        // NOTE(review): assumes the "SUN" provider implements the given signature
        // algorithm (e.g. ML-DSA) — confirm for the tested JDK version.
        var sig = Signature.getInstance(algorithm, "SUN");
        sig.initSign(kp.getPrivate(), new SeededSecureRandom());
        sig.update(input.clone());
        byte[] result = sig.sign();
        sig.initVerify(kp.getPublic());
        boolean verifyResult = sig.verify(result);
        // Append the boolean verification result as one extra byte.
        byte[] newResult = Arrays.copyOf(result, result.length + 1);
        newResult[result.length] = (byte) (verifyResult ? 1 : 0);
        return newResult;
    }
@Test
public void testMLDSASigVer() {
Assume.assumeTrue("ML_DSA not supported", runtime().getVMConfig().stubDoubleKeccak != 0L);
Assume.assumeTrue("ML_DSA not supported", runtime().getVMConfig().stubDilithiumAlmostNtt != 0L);
Assume.assumeTrue("ML_DSA not supported", runtime().getVMConfig().stubDilithiumAlmostInverseNtt != 0L);
Assume.assumeTrue("ML_DSA not supported", runtime().getVMConfig().stubDilithiumNttMult != 0L);
Assume.assumeTrue("ML_DSA not supported", runtime().getVMConfig().stubDilithiumMontMulByConstant != 0L);
Assume.assumeTrue("ML_DSA not supported", runtime().getVMConfig().stubDilithiumDecomposePoly != 0L);
// ML-DSA-44
testWithInstalledIntrinsic("sun.security.provider.SHA3Parallel", "doubleKeccak", "testSignVer", "ML-DSA-44");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumAlmostNtt", "testSignVer", "ML-DSA-44");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumAlmostInverseNtt", "testSignVer", "ML-DSA-44");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumNttMult", "testSignVer", "ML-DSA-44");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumMontMulByConstant", "testSignVer", "ML-DSA-44");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumDecomposePoly", "testSignVer", "ML-DSA-44");
// ML-DSA-65
testWithInstalledIntrinsic("sun.security.provider.SHA3Parallel", "doubleKeccak", "testSignVer", "ML-DSA-65");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumAlmostNtt", "testSignVer", "ML-DSA-65");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumAlmostInverseNtt", "testSignVer", "ML-DSA-65");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumNttMult", "testSignVer", "ML-DSA-65");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumMontMulByConstant", "testSignVer", "ML-DSA-65");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumDecomposePoly", "testSignVer", "ML-DSA-65");
// ML-DSA-87
testWithInstalledIntrinsic("sun.security.provider.SHA3Parallel", "doubleKeccak", "testSignVer", "ML-DSA-87");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumAlmostNtt", "testSignVer", "ML-DSA-87");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumAlmostInverseNtt", "testSignVer", "ML-DSA-87");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumNttMult", "testSignVer", "ML-DSA-87");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumMontMulByConstant", "testSignVer", "ML-DSA-87");
testWithInstalledIntrinsic("sun.security.provider.ML_DSA", "implDilithiumDecomposePoly", "testSignVer", "ML-DSA-87");
}
public boolean testMLKEMEncapsulateDecapsulate(String algorithm) throws GeneralSecurityException {
var kp = generateKeyPair(algorithm);
var senderKem = KEM.getInstance(algorithm);
var encapsulator = senderKem.newEncapsulator(kp.getPublic(), new SeededSecureRandom());
var enc = encapsulator.encapsulate();
SecretKey key = enc.key();
var receiverKem = KEM.getInstance(algorithm);
byte[] ciphertext = enc.encapsulation();
var decapsulator = receiverKem.newDecapsulator(kp.getPrivate());
SecretKey decapsulatedKey = decapsulator.decapsulate(ciphertext);
return key.equals(decapsulatedKey);
}
@Test
public void testMLKEM() {
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyberNtt != 0L);
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyberInverseNtt != 0L);
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyberNttMult != 0L);
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyberAddPoly2 != 0L);
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyberAddPoly3 != 0L);
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyber12To16 != 0L);
Assume.assumeTrue("ML_KEM not supported", runtime().getVMConfig().stubKyberBarrettReduce != 0L);
Class<?> c;
try {
c = Class.forName("sun.security.provider.ML_KEM");
} catch (ClassNotFoundException e) {
Assume.assumeTrue("sun.security.provider.ML_KEM is not available", false);
return;
}
// ML-KEM-512
testWithInstalledIntrinsic("sun.security.provider.ML_KEM", "implKyberNtt", "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberInverseNtt", "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberNttMult", "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, "implKyberAddPoly", short[].class, short[].class, short[].class)), "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, "implKyberAddPoly", short[].class, short[].class, short[].class, short[].class)), "testMLKEMEncapsulateDecapsulate",
"ML-KEM-512");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyber12To16", "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyber12To16", "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberBarrettReduce", "testMLKEMEncapsulateDecapsulate", "ML-KEM-512");
// ML-KEM-768
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberNtt", "testMLKEMEncapsulateDecapsulate", "ML-KEM-768");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberInverseNtt", "testMLKEMEncapsulateDecapsulate", "ML-KEM-768");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberNttMult", "testMLKEMEncapsulateDecapsulate", "ML-KEM-768");
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, "implKyberAddPoly", short[].class, short[].class, short[].class)), "testMLKEMEncapsulateDecapsulate", "ML-KEM-768");
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, "implKyberAddPoly", short[].class, short[].class, short[].class, short[].class)), "testMLKEMEncapsulateDecapsulate",
"ML-KEM-768");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyber12To16", "testMLKEMEncapsulateDecapsulate", "ML-KEM-768");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberBarrettReduce", "testMLKEMEncapsulateDecapsulate", "ML-KEM-768");
// ML-KEM-1024
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberNtt", "testMLKEMEncapsulateDecapsulate", "ML-KEM-1024");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberInverseNtt", "testMLKEMEncapsulateDecapsulate", "ML-KEM-1024");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberNttMult", "testMLKEMEncapsulateDecapsulate", "ML-KEM-1024");
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, "implKyberAddPoly", short[].class, short[].class, short[].class)), "testMLKEMEncapsulateDecapsulate", "ML-KEM-1024");
testWithInstalledIntrinsic(getMetaAccess().lookupJavaMethod(getMethod(c, "implKyberAddPoly", short[].class, short[].class, short[].class, short[].class)), "testMLKEMEncapsulateDecapsulate",
"ML-KEM-1024");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyber12To16", "testMLKEMEncapsulateDecapsulate", "ML-KEM-1024");
testWithInstalledIntrinsic("sun.security.provider.ML-KEM", "implKyberBarrettReduce", "testMLKEMEncapsulateDecapsulate", "ML-KEM-1024");
}
}
|
apache/sis | 35,554 | optional/src/org.apache.sis.gui/main/org/apache/sis/gui/coverage/ImagePropertyExplorer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.gui.coverage;
import java.util.Locale;
import java.util.List;
import java.util.Map;
import java.util.IdentityHashMap;
import java.util.function.Predicate;
import java.text.NumberFormat;
import java.io.IOException;
import java.awt.Rectangle;
import java.awt.image.RenderedImage;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.ObjectPropertyBase;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.transformation.FilteredList;
import javafx.geometry.Pos;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TitledPane;
import javafx.scene.control.TreeCell;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeView;
import javafx.scene.layout.Background;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.util.Callback;
import org.apache.sis.gui.Widget;
import org.apache.sis.image.PlanarImage;
import org.apache.sis.image.ResampledImage;
import org.apache.sis.util.CharSequences;
import org.apache.sis.util.resources.Vocabulary;
import org.apache.sis.gui.internal.Styles;
import org.apache.sis.gui.internal.PropertyView;
import org.apache.sis.gui.internal.ImmutableObjectProperty;
import org.apache.sis.gui.internal.PropertyValueFormatter;
import org.apache.sis.gui.internal.Resources;
/**
* Information about {@link RenderedImage} (sources, layout, properties).
* The {@link #image} property value is shown as the root of a tree of images,
* with image {@linkplain RenderedImage#getSources() sources} as children.
* When an image is selected, its layout (image size, tile size, <i>etc.</i>) is described in a table.
* Image {@linkplain RenderedImage#getPropertyNames() properties} are also available in a separated table.
*
* <p>This widget is useful mostly for debugging purposes or for advanced users.
* For displaying a geospatial raster as a GIS application, see {@link CoverageCanvas} instead.</p>
*
* @author Martin Desruisseaux (Geomatys)
* @version 1.2
* @since 1.1
*/
public class ImagePropertyExplorer extends Widget {
/**
* The root image to describe. This image will be the root of a tree;
* children will be image {@linkplain RenderedImage#getSources() sources}.
*
* <h4>API note</h4>
* We do not provide getter/setter for this property; use {@link ObjectProperty#set(Object)}
* directly instead. We omit the "Property" suffix for making this operation more natural.
*/
public final ObjectProperty<RenderedImage> image;
/**
* Implementation of {@link #image} property.
*/
    private final class ImageProperty extends ObjectPropertyBase<RenderedImage> {
        /** Returns the bean that contains this property, i.e. the enclosing explorer. */
        @Override public Object getBean() {return ImagePropertyExplorer.this;}
        /** Returns the public name of this property ({@code "image"}). */
        @Override public String getName() {return "image";}
        /** Sets this property to the given value with no sub-region (delegates to the outer setter). */
        @Override public void set(RenderedImage newValue) {setImage(newValue, null);}
        /** Do the actual set operation without invoking {@link ImagePropertyExplorer} setter method. */
        void assign(RenderedImage newValue) {super.set(newValue);}
    }
/**
* Image region which is currently visible, or {@code null} if unspecified.
* Conceptually this field and {@link #image} should be set together. But we have not defined
* a container object for those two properties. So we use that field as a workaround for now.
*
* @see #setImage(RenderedImage, Rectangle)
*/
private Rectangle visibleImageBounds;
/**
* Whether {@link #visibleImageBounds} applies to the coordinate system of an image.
 * This is initially {@code true} for an image specified by {@link CoverageCanvas} and becomes {@code false}
* after a {@link ResampledImage} is found. Images not present in this map are implicitly associated to the
* {@code false} value.
*
* <p>This map is also opportunistically used for avoiding never-ending recursion
* during the traversal of image sources.</p>
*
* @see #setImage(RenderedImage, Rectangle)
*/
private final Map<RenderedImage,Boolean> imageUseBoundsCS;
/**
* Whether to update {@code ImagePropertyExplorer} content when the {@link #image} changed.
* This is usually {@code true} unless this {@code ImagePropertyExplorer} is hidden,
 * in which case it may be useful to temporarily disable updates for saving CPU times.
*
* <div class="note"><b>Example:</b>
* if this {@code ImagePropertyExplorer} is shown in a {@link TitledPane}, one can bind this property
* to {@link TitledPane#expandedProperty()} for updating the content only if the pane is visible.
* </div>
*
* Note that setting this property to {@code false} may have the effect of discarding current content
* when the {@link #image} change. This is done for allowing the garbage collector to reclaim memory.
 * The content is reset to {@link #image} properties when {@code updateOnChange} becomes {@code true} again.
*
* <h4>API note</h4>
* We do not provide getter/setter for this property; use {@link BooleanProperty#set(boolean)}
* directly instead. We omit the "Property" suffix for making this operation more natural.
*/
public final BooleanProperty updateOnChange;
/**
* Whether to notify {@code ImagePropertyExplorer} about {@link #image} changes.
* This may become {@code false} after {@link #updateOnChange} (not at the same time),
 * and is reset to {@code true} when {@code updateOnChange} becomes {@code true} again.
*
* @see #updateOnChange
* @see #startListening()
*/
private boolean listening;
/**
* The root {@link #image} and its sources as a tree. The root value may be {@code null} and the children
* removed if the tree needs to be rebuilt after an {@linkplain #image} change and this rebuild has been
* deferred ({@link #updateOnChange} is {@code false}).
*/
private final TreeItem<RenderedImage> sourcesRoot;
/**
* The selected item in the sources tree.
*
* @see #getSelectedImage()
*/
private final ReadOnlyObjectProperty<TreeItem<RenderedImage>> selectedImage;
/**
* The rows in the table showing layout information (image size, tile size, image position, <i>etc</i>).
* This list should be considered read-only.
*/
private final ObservableList<LayoutRow> layoutRows;
/**
* A row in the table showing image layout. The inherited {@link String} property is the label to show in
* the first column. That label never change, contrarily to the {@link #xp} and {@link #yp} property values
* which are updated every time that we need to update the content for a new image.
*/
private static final class LayoutRow extends ImmutableObjectProperty<String> {
/**
* Row indices where {@link LayoutRow} instances are shown, when all rows are present.
* Rows {@link #DISPLAYED_SIZE} and {@link #MIN_VISIBLE} may be absent, in which case
* next rows have their position shifted.
*/
static final int IMAGE_SIZE = 0, DISPLAYED_SIZE = 1, TILE_SIZE = 2, NUM_TILES = 3,
MIN_PIXEL = 4, MIN_VISIBLE = 5, MIN_TILE = 6;
/**
* Creates all rows.
*/
static LayoutRow[] values(final Vocabulary vocabulary, final Resources resources) {
final LayoutRow[] rows = new LayoutRow[7];
rows[IMAGE_SIZE] = new LayoutRow(true, vocabulary.getString(Vocabulary.Keys.ImageSize));
rows[DISPLAYED_SIZE] = new LayoutRow(false, resources .getString(Resources .Keys.DisplayedSize));
rows[TILE_SIZE] = new LayoutRow(true, vocabulary.getString(Vocabulary.Keys.TileSize));
rows[NUM_TILES] = new LayoutRow(true, vocabulary.getString(Vocabulary.Keys.NumberOfTiles));
rows[MIN_PIXEL] = new LayoutRow(true, resources .getString(Resources .Keys.ImageStart));
rows[MIN_VISIBLE] = new LayoutRow(false, resources .getString(Resources .Keys.DisplayStart));
rows[MIN_TILE] = new LayoutRow(true, resources .getString(Resources .Keys.TileIndexStart));
return rows;
}
/** Size or position along x and y axes, to show in second and third columns. */
final IntegerProperty xp, yp;
/**
* Whether this property is a core property to keep always visible.
*/
private final boolean core;
/** Creates a new row with the given label in first column. */
private LayoutRow(final boolean core, final String label) {
super(label);
this.core = core;
xp = new SimpleIntegerProperty();
yp = new SimpleIntegerProperty();
}
/**
* Updates {@link #xp} and {@link #yp} property values for the given image.
* The index <var>i</var> is the row index when no filtering is applied.
*/
final void update(final RenderedImage image, final Rectangle visibleImageBounds, final int i) {
int x = 0, y = 0;
if (image != null) switch (i) {
case IMAGE_SIZE: x = image.getWidth(); y = image.getHeight(); break;
case TILE_SIZE: x = image.getTileWidth(); y = image.getTileHeight(); break;
case NUM_TILES: x = image.getNumXTiles(); y = image.getNumYTiles(); break;
case MIN_TILE: x = image.getMinTileX(); y = image.getMinTileY(); break;
case MIN_PIXEL: x = image.getMinX(); y = image.getMinY(); break;
case MIN_VISIBLE: if (visibleImageBounds != null) {
x = visibleImageBounds.x;
y = visibleImageBounds.y;
}
break;
case DISPLAYED_SIZE: if (visibleImageBounds != null) {
x = visibleImageBounds.width;
y = visibleImageBounds.height;
}
break;
}
xp.set(x);
yp.set(y);
}
/**
* Filter for excluding the rows that need a non-null {@code visibleImageBounds} argument.
*/
static Predicate<LayoutRow> EXCLUDE_VISIBILITY = (r) -> r.core;
}
/**
 * The predicate for filtering {@link #layoutRows}, or {@code null} for showing all rows.
 *
 * @see LayoutRow#EXCLUDE_VISIBILITY
 */
private final ObjectProperty<Predicate<? super LayoutRow>> layoutFilter;
/**
 * The rows in the table showing property values.
 * Rows in this list are added and removed when the image changes.
 *
 * @see #updatePropertyList(RenderedImage)
 */
private final ObservableList<PropertyRow> propertyRows;
/**
 * The selected item in the table of properties, or {@code null} if none.
 */
private final ReadOnlyObjectProperty<PropertyRow> selectedProperty;
/**
 * A row in the table showing image properties. The inherited {@link String} value is the
 * property name; {@link #value} holds the property value fetched from the image.
 * Rows are updated in place when a new image is shown, so that the current row
 * selection survives image changes whenever the property names match.
 */
private static final class PropertyRow extends ImmutableObjectProperty<String> {
    /** Value of the image property, or the exception thrown while fetching it. */
    final ObjectProperty<Object> value;

    /** Creates a new row for the named property of the given image. */
    PropertyRow(final RenderedImage image, final String property) {
        super(property);
        value = new SimpleObjectProperty<>(fetch(image, property));
    }

    /**
     * Updates this row with the value for the given image if the property name matches.
     * Returns whether the update has been performed.
     */
    final boolean update(final RenderedImage image, final String property) {
        if (!property.equals(super.get())) {
            return false;
        }
        value.set(fetch(image, property));
        return true;
    }

    /** Fetches a property value of the given image, or the exception if that operation failed. */
    private static Object fetch(final RenderedImage image, final String property) {
        try {
            return image.getProperty(property);
        } catch (RuntimeException e) {
            return e;
        }
    }

    /** Returns a human-readable variation of the property name for use in graphic interface. */
    @Override
    public String get() {
        final String name = super.get();
        final String tip = name.substring(name.lastIndexOf('.') + 1);
        return CharSequences.camelCaseToSentence(tip).toString();
    }
}
/**
 * The tab where to show details about a property value. The content of the tab may be
 * different kinds of node depending on the class of the property to be shown.
 *
 * @see #propertyDetails
 * @see #updatePropertyDetails(Rectangle)
 */
private final Tab detailsTab;
/**
 * Viewer of property value. The different components of this viewer are created when first needed.
 *
 * @see #updatePropertyDetails(Rectangle)
 */
private final PropertyView propertyDetails;
/**
 * The view containing all visual components.
 * The exact class may change in any future version.
 */
private final TabPane view;
/**
 * Creates an initially empty explorer using the default locale and no background color.
 */
public ImagePropertyExplorer() {
    this(null, null);
}
/**
 * Creates a new explorer.
 *
 * @param locale     the locale for controls and messages, or {@code null} for the default.
 * @param background the image background color, or {@code null} if none.
 */
@SuppressWarnings({"this-escape", "unchecked"}) // Generic array construction.
ImagePropertyExplorer(final Locale locale, final ObjectProperty<Background> background) {
    final Vocabulary vocabulary = Vocabulary.forLocale(locale);
    final Resources resources = Resources.forLocale(locale);
    // Following variables could be class fields, but are not yet needed outside this constructor.
    final TreeView<RenderedImage> sources;
    final TableView<LayoutRow> layout;
    final NumberFormat integerFormat;
    final TableView<PropertyRow> properties;
    image = new ImageProperty();
    imageUseBoundsCS = new IdentityHashMap<>(4);
    updateOnChange = new SimpleBooleanProperty(this, "updateOnChange", true);
    listening = true;
    /*
     * Tree of image sources. The root is never changed after construction. All children nodes can
     * be created, removed or updated to new value at any time. At most one image can be selected.
     */
    {
        sourcesRoot = new TreeItem<>();
        sources = new TreeView<>(sourcesRoot);
        selectedImage = sources.getSelectionModel().selectedItemProperty();
        sources.setCellFactory(SourceCell::new);
        // When the selection changes, refresh all tables for the newly selected image
        // (defaulting to the root image when the selected node carries no image).
        selectedImage.addListener((p,o,n) -> {
            RenderedImage selected = null;
            if (n != null) selected = n.getValue();
            imageSelected(selected != null ? selected : image.get());
        });
    }
    /*
     * Table of image layout built with a fixed set of rows: no row will be added or removed after
     * construction. Instead, property values of existing rows will be modified when a new image
     * is selected. Row selection are not allowed since we have nothing to do with selected rows.
     */
    {
        final FilteredList<LayoutRow> filtered;
        layoutRows = FXCollections.observableArrayList(LayoutRow.values(vocabulary, resources));
        filtered = new FilteredList<>(layoutRows);
        layout = new TableView<>(filtered);
        layoutFilter = filtered.predicateProperty();
        integerFormat = NumberFormat.getIntegerInstance();
        layout.setSelectionModel(null);
        final TableColumn<LayoutRow, String> label = new TableColumn<>(resources.getString(Resources.Keys.SizeOrPosition));
        final TableColumn<LayoutRow, Number> xCol = new TableColumn<>(resources.getString(Resources.Keys.Along_1, "X"));
        final TableColumn<LayoutRow, Number> yCol = new TableColumn<>(resources.getString(Resources.Keys.Along_1, "Y"));
        // Both numeric columns share one cell factory (and its single NumberFormat instance);
        // this is safe because cells are used from the JavaFX application thread only.
        final Callback<TableColumn<LayoutRow, Number>,
                       TableCell<LayoutRow, Number>> cellFactory = (column) -> new LayoutCell(integerFormat);
        xCol .setCellFactory(cellFactory);
        yCol .setCellFactory(cellFactory);
        xCol .setCellValueFactory((cell) -> cell.getValue().xp);
        yCol .setCellValueFactory((cell) -> cell.getValue().yp);
        label.setCellValueFactory((cell) -> cell.getValue());
        layout.getColumns().setAll(label, xCol, yCol);
        layout.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY_FLEX_LAST_COLUMN);
        layout.getColumns().forEach((c) -> {
            c.setReorderable(false);
            c.setSortable(false);
        });
    }
    /*
     * Table of image properties. Contrarily to the layout table, the set of rows in
     * this property table may change at any time. At most one row can be selected.
     * We do not register a listener on the row selection; instead we wait for the
     * details pane to become visible.
     */
    {
        properties = new TableView<>();
        propertyRows = properties.getItems();
        selectedProperty = properties.getSelectionModel().selectedItemProperty();
        final TableColumn<PropertyRow, String> label = new TableColumn<>(vocabulary.getString(Vocabulary.Keys.Property));
        final TableColumn<PropertyRow, Object> value = new TableColumn<>(vocabulary.getString(Vocabulary.Keys.Value));
        label.setCellValueFactory((cell) -> cell.getValue());
        value.setCellValueFactory((cell) -> cell.getValue().value);
        value.setCellFactory((column) -> new PropertyCell(locale));
        properties.getColumns().setAll(label, value);
        properties.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY_FLEX_LAST_COLUMN);
        properties.getColumns().forEach((c) -> c.setReorderable(false));
    }
    /*
     * Tab where to show details about the currently selected property value.
     * The tab content is updated when it become visible. We can do that because
     * the property selection is done in another tab.
     */
    {
        detailsTab = new Tab(vocabulary.getString(Vocabulary.Keys.Details));
        selectedProperty.addListener((p,o,n) -> clearPropertyValues(false));
        propertyDetails = new PropertyView(locale, detailsTab.contentProperty(), background);
        detailsTab.selectedProperty().addListener((p,o,n) -> {
            if (n) updatePropertyDetails(getVisibleImageBounds(getSelectedImage()));
        });
    }
    /*
     * The view containing all visual components. In current version the sources is a tab like others.
     * A previous version was showing the sources on top (using SlidePane), so we could navigate easily
     * in the properties of different sources. It has been removed for simplifying the layout, but the
     * listeners are still updating layout and property panes immediately when a new source is selected.
     */
    view = new TabPane(
            new Tab(vocabulary.getString(Vocabulary.Keys.Source),     sources),
            new Tab(vocabulary.getString(Vocabulary.Keys.Layout),     layout),
            new Tab(vocabulary.getString(Vocabulary.Keys.Properties), properties),
            detailsTab);
    view.setTabClosingPolicy(TabPane.TabClosingPolicy.UNAVAILABLE);
    updateOnChange.addListener((p,o,n) -> {if (n) startListening();});
}
/**
 * Invoked when {@link #updateOnChange} became {@code true}: brings the visual
 * components up to date with the current image if they were left stale.
 *
 * <p>Note: there is no {@code stopListening()} method because setting the {@link #listening}
 * flag to {@code false} is done by {@link #setImage(RenderedImage, Rectangle)}.</p>
 */
private void startListening() {
    listening = true;
    if (sourcesRoot.getValue() != null) {
        return;                             // Components are already up to date.
    }
    setTreeRoot(image.get());
    refreshTables();
}
/**
 * Sets the image to show together with the coordinates of the region currently shown.
 * If {@link #updateOnChange} is true the views are refreshed immediately;
 * otherwise the refresh is deferred until the views become visible again.
 *
 * @param newValue      the new image, or {@code null} if none.
 * @param visibleBounds image region which is currently visible, or {@code null} if unspecified.
 */
final void setImage(final RenderedImage newValue, final Rectangle visibleBounds) {
    visibleImageBounds = visibleBounds;
    ((ImageProperty) image).assign(newValue);
    if (!listening) {
        return;
    }
    if (updateOnChange.get()) {
        setTreeRoot(newValue);
        refreshTables();
    } else {
        // Defer the update: drop current content and stop listening until views reappear.
        setTreeRoot(null);
        clearPropertyValues(true);
        listening = false;
    }
}
/**
 * Returns the image currently selected in the sources tree. If no image is
 * explicitly selected, falls back on the root {@linkplain #image} (may be null).
 */
private RenderedImage getSelectedImage() {
    RenderedImage selected = null;
    final TreeItem<RenderedImage> node = selectedImage.get();
    if (node != null) {
        selected = node.getValue();
    }
    return (selected != null) ? selected : image.get();
}
/**
 * Refreshes all visual components except the tree of sources: the table of image
 * layout, the table of property values and the details of the selected property.
 */
private void refreshTables() {
    final RenderedImage selected = getSelectedImage();
    imageSelected(selected);
}
/**
 * Invoked when an image is selected in the tree of image sources. The selected image
 * is not necessarily the {@link #image} property value; it may be one of its sources.
 * If no image is explicitly selected, the caller defaults to the root image.
 */
private void imageSelected(final RenderedImage selected) {
    final Rectangle bounds = getVisibleImageBounds(selected);
    int index = 0;
    for (final LayoutRow row : layoutRows) {
        row.update(selected, bounds, index++);
    }
    if (bounds != null) {
        layoutFilter.set(null);                             // Show all rows.
    } else {
        layoutFilter.set(LayoutRow.EXCLUDE_VISIBILITY);     // Hide rows needing the bounds.
    }
    updatePropertyList(selected);
    /*
     * The selected property value may have changed as a result of the above.
     * If the details tab is visible, update immediately. Otherwise we wait
     * for that tab to become visible.
     */
    if (detailsTab.isSelected()) {
        updatePropertyDetails(bounds);
    }
}
/**
 * Returns the pixel coordinates of the region shown on screen, or {@code null}
 * if none or if those coordinates do not apply to the given image.
 */
final Rectangle getVisibleImageBounds(final RenderedImage selected) {
    final Boolean applicable = imageUseBoundsCS.get(selected);
    if (Boolean.TRUE.equals(applicable)) {
        return visibleImageBounds;
    }
    return null;
}
/**
 * Sets the root image together with its tree of sources.
 */
private void setTreeRoot(final RenderedImage newValue) {
    imageUseBoundsCS.clear();
    setTreeNode(sourcesRoot, newValue, imageUseBoundsCS, visibleImageBounds != null);
    /*
     * Entries associated to `false` are dropped because `false` is our default value.
     * The intent is to avoid unnecessary `RenderedImage` references for reducing the
     * risk of memory retention.
     */
    imageUseBoundsCS.values().removeIf(Boolean.FALSE::equals);
}
/**
 * Recursively synchronizes the given tree node with the given image and its chain of sources.
 * This method is not necessarily invoked immediately after an {@linkplain #image} change;
 * the update may be deferred until the tree become visible. Existing child nodes are reused
 * (updated in place) when possible; extra nodes are removed and missing nodes are appended.
 *
 * @param root             the node to update with the given image.
 * @param image            the image to associate to the node, or {@code null} if none.
 * @param imageUseBoundsCS the {@link #imageUseBoundsCS} as an initially empty map. This map is
 *        populated by this method and opportunistically used for avoiding infinite recursion.
 * @param boundsApplicable whether {@code visibleImageBounds} applies to the image coordinate system.
 */
private static void setTreeNode(final TreeItem<RenderedImage> root, final RenderedImage image,
        final Map<RenderedImage,Boolean> imageUseBoundsCS, Boolean boundsApplicable)
{
    root.setValue(image);
    // A non-null return value means this image has already been visited
    // (same image appearing twice in the source graph): stop the recursion.
    if (imageUseBoundsCS.putIfAbsent(image, boundsApplicable) == null) {
        final ObservableList<TreeItem<RenderedImage>> children = root.getChildren();
        if (image != null) {
            final List<RenderedImage> sources = image.getSources();
            if (sources != null) {
                /*
                 * If the image is an instance of `ResampledImage`, then its
                 * source is presumed to use a different coordinate system.
                 */
                if (image instanceof ResampledImage) {
                    boundsApplicable = Boolean.FALSE;
                }
                final int numSrc = sources.size();
                final int numDst = children.size();
                final int n = Math.min(numSrc, numDst);
                int i;
                // Reuse existing child nodes as far as possible (updated in place).
                for (i=0; i<n; i++) {
                    setTreeNode(children.get(i), sources.get(i), imageUseBoundsCS, boundsApplicable);
                }
                // Append new nodes for the extra sources, if any.
                for (; i<numSrc; i++) {
                    final TreeItem<RenderedImage> child = new TreeItem<>();
                    setTreeNode(child, sources.get(i), imageUseBoundsCS, boundsApplicable);
                    children.add(child);
                }
                // Remove leftover nodes from a previous, larger source list.
                if (i < numDst) {
                    children.remove(i, numDst);
                }
                return;
            }
        }
        children.clear();
    }
}
/**
 * Renderer of cells in the tree of image sources.
 */
private static final class SourceCell extends TreeCell<RenderedImage> {
    /**
     * Invoked by the cell factory (must have this exact signature).
     */
    SourceCell(final TreeView<RenderedImage> tree) {
    }

    /**
     * Invoked when a new image is shown in this cell node. This method also tests image
     * consistency. If an inconsistency is found, the line is shown in red together with
     * a warning message — except for "width" and "height" mismatches, which may be normal.
     */
    @Override protected void updateItem(final RenderedImage image, final boolean empty) {
        super.updateItem(image, empty);
        String label = null;
        Color textColor = Styles.NORMAL_TEXT;
        if (image != null) {
            /*
             * Gets a simple top-level class name for the image class. If the image class is
             * an enclosed one, walks up the superclass chain because enclosed class names are
             * often not very informative (e.g. `ImageRenderer.Untitled` is a `BufferedImage`).
             */
            Class<?> c = image.getClass();
            while (c.getEnclosingClass() != null) {
                c = c.getSuperclass();
            }
            label = c.getSimpleName();
            if (image instanceof PlanarImage) {
                final String inconsistency = ((PlanarImage) image).verify();
                if (inconsistency != null) {
                    label = Resources.format(Resources.Keys.InconsistencyIn_2, label, inconsistency);
                    final boolean sizeOnly = inconsistency.equals("width") || inconsistency.equals("height");
                    if (!sizeOnly) {
                        textColor = Styles.ERROR_TEXT;
                    }
                }
            }
        }
        setText(label);
        setTextFill(textColor);
    }
}
/**
 * Renderer of cells in the table of image layout information.
 */
private static final class LayoutCell extends TableCell<LayoutRow,Number> {
    /** The formatter to use for numerical values in the table. */
    private final NumberFormat format;

    /** Invoked by the cell factory. */
    LayoutCell(final NumberFormat integerFormat) {
        format = integerFormat;
        setAlignment(Pos.CENTER_RIGHT);
    }

    /** Invoked when a new value is shown in this table cell. */
    @Override protected void updateItem(final Number value, final boolean empty) {
        super.updateItem(value, empty);
        String text = null;
        if (value != null) {
            text = format.format(value);
        }
        setText(text);
    }
}
/**
 * Renderer of cells in the table of image properties.
 */
private static final class PropertyCell extends TableCell<PropertyRow,Object> {
    /** The formatter producing a short string representation of a property value. */
    private final PropertyValueFormatter format;

    /** Temporary buffer used when formatting property values. */
    private final StringBuilder buffer;

    /** Invoked by the cell factory. */
    PropertyCell(final Locale locale) {
        buffer = new StringBuilder();
        format = new PropertyValueFormatter(buffer, locale);
    }

    /** Invoked when a new value is shown in this table cell. */
    @Override protected void updateItem(final Object value, final boolean empty) {
        super.updateItem(value, empty);
        setText(empty ? null : format(value));
    }

    /** Formats the given property value, or returns the error text if formatting failed. */
    private String format(final Object value) {
        buffer.setLength(0);
        try {
            format.appendValue(value);
            format.flush();
            return buffer.toString();
        } catch (IOException e) {       // Should never happen since we write in a StringBuilder.
            return e.toString();
        }
    }
}
/**
 * Updates the list of properties for the given image.
 * The {@link #propertyRows} are updated with an effort for reusing existing items when
 * the property name is the same. The intent is to keep selection unchanged if possible
 * (because removing a selected row may make it unselected).
 *
 * @param selected the image whose properties are to be listed, or {@code null} if none.
 */
private void updatePropertyList(final RenderedImage selected) {
    if (selected != null) {
        final String[] properties = selected.getPropertyNames();
        if (properties != null) {
            int insertAt = 0;                       // Rows before this index are finalized.
            nextProp: for (final String property : properties) {
                if (property != null) {
                    for (int i = insertAt; i < propertyRows.size(); i++) {
                        if (propertyRows.get(i).update(selected, property)) {
                            // Discard rows of properties that no longer exist before this one.
                            propertyRows.remove(insertAt, i);
                            /*
                             * BUG FIX: after the removal above the matched row moved from
                             * index i to index `insertAt`, so the next insertion point is
                             * `insertAt + 1`. The previous code used `insertAt = i + 1`,
                             * which skipped rows and could make the final bulk `remove(…)`
                             * below fail with an out-of-range exception.
                             */
                            insertAt++;
                            continue nextProp;
                        }
                    }
                    propertyRows.add(insertAt++, new PropertyRow(selected, property));
                }
            }
            // Discard any trailing rows left over from the previous image.
            propertyRows.remove(insertAt, propertyRows.size());
            return;
        }
    }
    propertyRows.clear();
}
/**
 * Updates the {@link #detailsTab} with the value of currently selected property.
 * This method may be invoked after the selection changed (but not immediately),
 * or after the selected image changed (which indirectly changes the properties).
 *
 * @param bounds {@link #visibleImageBounds} or {@code null} if it does not apply to current image.
 */
private void updatePropertyDetails(final Rectangle bounds) {
    final PropertyRow row = selectedProperty.get();
    final Object selectedValue = (row == null) ? null : row.value.get();
    propertyDetails.set(selectedValue, bounds);
}
/**
 * Clears the content of {@link #detailsTab}, and optionally the table of property values.
 * Invoked when the tab became hidden and the image changed, in order to give
 * a chance to the garbage collector to release memory.
 *
 * @param full whether to also clear the table in the "properties" tab (in addition of clearing
 *        the "details" tab). Shall be {@code false} if the properties tab is still visible.
 */
private void clearPropertyValues(final boolean full) {
    if (propertyDetails != null) {
        propertyDetails.clear();
        detailsTab.setContent(null);
    }
    if (!full) {
        return;
    }
    propertyRows.clear();
}
/**
 * Returns the view of this explorer. The returned subclass is implementation
 * dependent and may change in any future version.
 *
 * @return this explorer view.
 */
@Override
public Region getView() {
    return view;
}
/**
 * Returns the locale for controls and messages.
 *
 * @return the locale given at construction time (possibly substituted by a default).
 *
 * @since 1.2
 */
@Override
public final Locale getLocale() {
    return propertyDetails.getLocale();
}
}
|
googleapis/google-cloud-java | 35,279 | java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/EventSegmentCriteria.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* An event matches a criteria if the event meet the conditions in the
* criteria.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.EventSegmentCriteria}
*/
public final class EventSegmentCriteria extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.EventSegmentCriteria)
EventSegmentCriteriaOrBuilder {
private static final long serialVersionUID = 0L;

// Use EventSegmentCriteria.newBuilder() to construct.
private EventSegmentCriteria(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}

// No-arg constructor used when building the default (empty) instance.
private EventSegmentCriteria() {
    andConditionGroups_ = java.util.Collections.emptyList();
}

// Invoked reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new EventSegmentCriteria();
}
/** Returns the protobuf descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_EventSegmentCriteria_descriptor;
}

/** Maps the generated accessors to descriptor fields, for reflective access. */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_EventSegmentCriteria_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.EventSegmentCriteria.class,
            com.google.analytics.data.v1alpha.EventSegmentCriteria.Builder.class);
}
public static final int AND_CONDITION_GROUPS_FIELD_NUMBER = 1;

// Backing list for field 1. Immutable once the message is built.
@SuppressWarnings("serial")
private java.util.List<com.google.analytics.data.v1alpha.EventSegmentConditionGroup>
    andConditionGroups_;

/**
 * An event matches this criteria if the event matches each of these
 * `andConditionGroups`.
 *
 * <code>
 * repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
 * </code>
 */
@java.lang.Override
public java.util.List<com.google.analytics.data.v1alpha.EventSegmentConditionGroup>
    getAndConditionGroupsList() {
    return andConditionGroups_;
}

/**
 * Same content as {@link #getAndConditionGroupsList()}, typed as the or-builder view.
 *
 * <code>
 * repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
 * </code>
 */
@java.lang.Override
public java.util.List<
        ? extends com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder>
    getAndConditionGroupsOrBuilderList() {
    return andConditionGroups_;
}

/**
 * Number of `andConditionGroups` entries.
 *
 * <code>
 * repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
 * </code>
 */
@java.lang.Override
public int getAndConditionGroupsCount() {
    return andConditionGroups_.size();
}

/**
 * Returns the `andConditionGroups` entry at the given index.
 *
 * <code>
 * repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
 * </code>
 */
@java.lang.Override
public com.google.analytics.data.v1alpha.EventSegmentConditionGroup getAndConditionGroups(
    int index) {
    return andConditionGroups_.get(index);
}

/**
 * Returns the `andConditionGroups` entry at the given index as the or-builder view.
 *
 * <code>
 * repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
 * </code>
 */
@java.lang.Override
public com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder
    getAndConditionGroupsOrBuilder(int index) {
    return andConditionGroups_.get(index);
}
// Memoized isInitialized() result: -1 = not computed yet, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

/** Always initialized: this message has no required fields. */
@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
}

/** Serializes field 1 (repeated) followed by any unknown fields. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < andConditionGroups_.size(); i++) {
        output.writeMessage(1, andConditionGroups_.get(i));
    }
    getUnknownFields().writeTo(output);
}

/** Computes (and memoizes) the serialized size in bytes. */
@java.lang.Override
public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < andConditionGroups_.size(); i++) {
        size +=
            com.google.protobuf.CodedOutputStream.computeMessageSize(1, andConditionGroups_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}

/** Field-wise equality, including unknown fields. */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1alpha.EventSegmentCriteria)) {
        return super.equals(obj);
    }
    com.google.analytics.data.v1alpha.EventSegmentCriteria other =
        (com.google.analytics.data.v1alpha.EventSegmentCriteria) obj;
    if (!getAndConditionGroupsList().equals(other.getAndConditionGroupsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}

/** Hash code consistent with {@link #equals(Object)}; memoized after first call. */
@java.lang.Override
public int hashCode() {
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAndConditionGroupsCount() > 0) {
        hash = (37 * hash) + AND_CONDITION_GROUPS_FIELD_NUMBER;
        hash = (53 * hash) + getAndConditionGroupsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// -------------------------------------------------------------------------------
// Standard generated parse entry points, one per supported input representation.
// -------------------------------------------------------------------------------
public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}

// Variants reading a length-delimited message from a stream.
public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.analytics.data.v1alpha.EventSegmentCriteria parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}

// -------------------------------------------------------------------------------
// Builder factory methods.
// -------------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
    return newBuilder();
}

public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.analytics.data.v1alpha.EventSegmentCriteria prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
}
/**
*
*
* <pre>
* An event matches a criteria if the event meet the conditions in the
* criteria.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.EventSegmentCriteria}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.EventSegmentCriteria)
com.google.analytics.data.v1alpha.EventSegmentCriteriaOrBuilder {
/** Returns the protobuf descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_EventSegmentCriteria_descriptor;
}

/** Maps the generated accessors to descriptor fields, for reflective access. */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_EventSegmentCriteria_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.EventSegmentCriteria.class,
            com.google.analytics.data.v1alpha.EventSegmentCriteria.Builder.class);
}

// Construct using com.google.analytics.data.v1alpha.EventSegmentCriteria.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
}
/** Resets this builder to the default (empty) state. */
@java.lang.Override
public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (andConditionGroupsBuilder_ == null) {
        andConditionGroups_ = java.util.Collections.emptyList();
    } else {
        // A nested field builder owns the list: clear through it instead.
        andConditionGroups_ = null;
        andConditionGroupsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.analytics.data.v1alpha.ReportingApiProto
        .internal_static_google_analytics_data_v1alpha_EventSegmentCriteria_descriptor;
}

@java.lang.Override
public com.google.analytics.data.v1alpha.EventSegmentCriteria getDefaultInstanceForType() {
    return com.google.analytics.data.v1alpha.EventSegmentCriteria.getDefaultInstance();
}
/** Builds the message, throwing if any required field is unset (none here). */
@java.lang.Override
public com.google.analytics.data.v1alpha.EventSegmentCriteria build() {
    com.google.analytics.data.v1alpha.EventSegmentCriteria result = buildPartial();
    if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
    }
    return result;
}

/** Builds the message without checking required-field initialization. */
@java.lang.Override
public com.google.analytics.data.v1alpha.EventSegmentCriteria buildPartial() {
    com.google.analytics.data.v1alpha.EventSegmentCriteria result =
        new com.google.analytics.data.v1alpha.EventSegmentCriteria(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
        buildPartial0(result);
    }
    onBuilt();
    return result;
}

/** Transfers the repeated field, freezing the list if this builder owns it. */
private void buildPartialRepeatedFields(
    com.google.analytics.data.v1alpha.EventSegmentCriteria result) {
    if (andConditionGroupsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
            andConditionGroups_ = java.util.Collections.unmodifiableList(andConditionGroups_);
            bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.andConditionGroups_ = andConditionGroups_;
    } else {
        result.andConditionGroups_ = andConditionGroupsBuilder_.build();
    }
}

// Intentionally empty: this message has no singular fields tracked by bitField0_.
private void buildPartial0(com.google.analytics.data.v1alpha.EventSegmentCriteria result) {
    int from_bitField0_ = bitField0_;
}
// Boilerplate overrides delegating to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
    return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
}

/** Dispatches to the strongly-typed mergeFrom when the runtime type matches. */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.analytics.data.v1alpha.EventSegmentCriteria) {
        return mergeFrom((com.google.analytics.data.v1alpha.EventSegmentCriteria) other);
    } else {
        super.mergeFrom(other);
        return this;
    }
}
public Builder mergeFrom(com.google.analytics.data.v1alpha.EventSegmentCriteria other) {
if (other == com.google.analytics.data.v1alpha.EventSegmentCriteria.getDefaultInstance())
return this;
if (andConditionGroupsBuilder_ == null) {
if (!other.andConditionGroups_.isEmpty()) {
if (andConditionGroups_.isEmpty()) {
andConditionGroups_ = other.andConditionGroups_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.addAll(other.andConditionGroups_);
}
onChanged();
}
} else {
if (!other.andConditionGroups_.isEmpty()) {
if (andConditionGroupsBuilder_.isEmpty()) {
andConditionGroupsBuilder_.dispose();
andConditionGroupsBuilder_ = null;
andConditionGroups_ = other.andConditionGroups_;
bitField0_ = (bitField0_ & ~0x00000001);
andConditionGroupsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAndConditionGroupsFieldBuilder()
: null;
} else {
andConditionGroupsBuilder_.addAllMessages(other.andConditionGroups_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.analytics.data.v1alpha.EventSegmentConditionGroup m =
input.readMessage(
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.parser(),
extensionRegistry);
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(m);
} else {
andConditionGroupsBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.analytics.data.v1alpha.EventSegmentConditionGroup>
andConditionGroups_ = java.util.Collections.emptyList();
private void ensureAndConditionGroupsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
andConditionGroups_ =
new java.util.ArrayList<com.google.analytics.data.v1alpha.EventSegmentConditionGroup>(
andConditionGroups_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.EventSegmentConditionGroup,
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder,
com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder>
andConditionGroupsBuilder_;
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public java.util.List<com.google.analytics.data.v1alpha.EventSegmentConditionGroup>
getAndConditionGroupsList() {
if (andConditionGroupsBuilder_ == null) {
return java.util.Collections.unmodifiableList(andConditionGroups_);
} else {
return andConditionGroupsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public int getAndConditionGroupsCount() {
if (andConditionGroupsBuilder_ == null) {
return andConditionGroups_.size();
} else {
return andConditionGroupsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.EventSegmentConditionGroup getAndConditionGroups(
int index) {
if (andConditionGroupsBuilder_ == null) {
return andConditionGroups_.get(index);
} else {
return andConditionGroupsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder setAndConditionGroups(
int index, com.google.analytics.data.v1alpha.EventSegmentConditionGroup value) {
if (andConditionGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAndConditionGroupsIsMutable();
andConditionGroups_.set(index, value);
onChanged();
} else {
andConditionGroupsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder setAndConditionGroups(
int index,
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder builderForValue) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.set(index, builderForValue.build());
onChanged();
} else {
andConditionGroupsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
com.google.analytics.data.v1alpha.EventSegmentConditionGroup value) {
if (andConditionGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(value);
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
int index, com.google.analytics.data.v1alpha.EventSegmentConditionGroup value) {
if (andConditionGroupsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(index, value);
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder builderForValue) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(builderForValue.build());
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAndConditionGroups(
int index,
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder builderForValue) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.add(index, builderForValue.build());
onChanged();
} else {
andConditionGroupsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder addAllAndConditionGroups(
java.lang.Iterable<? extends com.google.analytics.data.v1alpha.EventSegmentConditionGroup>
values) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, andConditionGroups_);
onChanged();
} else {
andConditionGroupsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder clearAndConditionGroups() {
if (andConditionGroupsBuilder_ == null) {
andConditionGroups_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
andConditionGroupsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public Builder removeAndConditionGroups(int index) {
if (andConditionGroupsBuilder_ == null) {
ensureAndConditionGroupsIsMutable();
andConditionGroups_.remove(index);
onChanged();
} else {
andConditionGroupsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder
getAndConditionGroupsBuilder(int index) {
return getAndConditionGroupsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder
getAndConditionGroupsOrBuilder(int index) {
if (andConditionGroupsBuilder_ == null) {
return andConditionGroups_.get(index);
} else {
return andConditionGroupsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public java.util.List<
? extends com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder>
getAndConditionGroupsOrBuilderList() {
if (andConditionGroupsBuilder_ != null) {
return andConditionGroupsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(andConditionGroups_);
}
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder
addAndConditionGroupsBuilder() {
return getAndConditionGroupsFieldBuilder()
.addBuilder(
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.getDefaultInstance());
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder
addAndConditionGroupsBuilder(int index) {
return getAndConditionGroupsFieldBuilder()
.addBuilder(
index,
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.getDefaultInstance());
}
/**
*
*
* <pre>
* An event matches this criteria if the event matches each of these
* `andConditionGroups`.
* </pre>
*
* <code>
* repeated .google.analytics.data.v1alpha.EventSegmentConditionGroup and_condition_groups = 1;
* </code>
*/
public java.util.List<com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder>
getAndConditionGroupsBuilderList() {
return getAndConditionGroupsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.EventSegmentConditionGroup,
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder,
com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder>
getAndConditionGroupsFieldBuilder() {
if (andConditionGroupsBuilder_ == null) {
andConditionGroupsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.EventSegmentConditionGroup,
com.google.analytics.data.v1alpha.EventSegmentConditionGroup.Builder,
com.google.analytics.data.v1alpha.EventSegmentConditionGroupOrBuilder>(
andConditionGroups_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
andConditionGroups_ = null;
}
return andConditionGroupsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.EventSegmentCriteria)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.EventSegmentCriteria)
private static final com.google.analytics.data.v1alpha.EventSegmentCriteria DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.EventSegmentCriteria();
}
public static com.google.analytics.data.v1alpha.EventSegmentCriteria getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<EventSegmentCriteria> PARSER =
new com.google.protobuf.AbstractParser<EventSegmentCriteria>() {
@java.lang.Override
public EventSegmentCriteria parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<EventSegmentCriteria> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<EventSegmentCriteria> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.EventSegmentCriteria getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,291 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/UpdateVolumeRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/volume.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* Message for updating a Volume
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateVolumeRequest}
*/
public final class UpdateVolumeRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.UpdateVolumeRequest)
UpdateVolumeRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateVolumeRequest.newBuilder() to construct.
private UpdateVolumeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateVolumeRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateVolumeRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.VolumeProto
.internal_static_google_cloud_netapp_v1_UpdateVolumeRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.VolumeProto
.internal_static_google_cloud_netapp_v1_UpdateVolumeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateVolumeRequest.class,
com.google.cloud.netapp.v1.UpdateVolumeRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int VOLUME_FIELD_NUMBER = 2;
private com.google.cloud.netapp.v1.Volume volume_;
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the volume field is set.
*/
@java.lang.Override
public boolean hasVolume() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The volume.
*/
@java.lang.Override
public com.google.cloud.netapp.v1.Volume getVolume() {
return volume_ == null ? com.google.cloud.netapp.v1.Volume.getDefaultInstance() : volume_;
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.netapp.v1.VolumeOrBuilder getVolumeOrBuilder() {
return volume_ == null ? com.google.cloud.netapp.v1.Volume.getDefaultInstance() : volume_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getVolume());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getVolume());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.netapp.v1.UpdateVolumeRequest)) {
return super.equals(obj);
}
com.google.cloud.netapp.v1.UpdateVolumeRequest other =
(com.google.cloud.netapp.v1.UpdateVolumeRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasVolume() != other.hasVolume()) return false;
if (hasVolume()) {
if (!getVolume().equals(other.getVolume())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasVolume()) {
hash = (37 * hash) + VOLUME_FIELD_NUMBER;
hash = (53 * hash) + getVolume().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.netapp.v1.UpdateVolumeRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for updating a Volume
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateVolumeRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.UpdateVolumeRequest)
com.google.cloud.netapp.v1.UpdateVolumeRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.VolumeProto
.internal_static_google_cloud_netapp_v1_UpdateVolumeRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.VolumeProto
.internal_static_google_cloud_netapp_v1_UpdateVolumeRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateVolumeRequest.class,
com.google.cloud.netapp.v1.UpdateVolumeRequest.Builder.class);
}
// Construct using com.google.cloud.netapp.v1.UpdateVolumeRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getVolumeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
volume_ = null;
if (volumeBuilder_ != null) {
volumeBuilder_.dispose();
volumeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.netapp.v1.VolumeProto
.internal_static_google_cloud_netapp_v1_UpdateVolumeRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateVolumeRequest getDefaultInstanceForType() {
return com.google.cloud.netapp.v1.UpdateVolumeRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateVolumeRequest build() {
com.google.cloud.netapp.v1.UpdateVolumeRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateVolumeRequest buildPartial() {
com.google.cloud.netapp.v1.UpdateVolumeRequest result =
new com.google.cloud.netapp.v1.UpdateVolumeRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.netapp.v1.UpdateVolumeRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.volume_ = volumeBuilder_ == null ? volume_ : volumeBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Dispatches to the typed merge when possible; otherwise falls back to reflective merge.
if (other instanceof com.google.cloud.netapp.v1.UpdateVolumeRequest) {
return mergeFrom((com.google.cloud.netapp.v1.UpdateVolumeRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies only the fields that are set on {@code other}; no-op for the default instance.
public Builder mergeFrom(com.google.cloud.netapp.v1.UpdateVolumeRequest other) {
if (other == com.google.cloud.netapp.v1.UpdateVolumeRequest.getDefaultInstance()) return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasVolume()) {
mergeVolume(other.getVolume());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// Proto3 messages have no required fields, so the builder is always initialized.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// Field 1 (update_mask), length-delimited.
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
// Field 2 (volume), length-delimited.
input.readMessage(getVolumeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Always notify parent builders, even on a parse failure mid-stream.
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 = update_mask is set, 0x2 = volume is set.
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
// Lazily-created nested builder; when non-null it owns the field value instead of updateMask_.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
// Presence is tracked by bit 0x1.
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
// Reads from the nested builder when one exists; never returns null.
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
// Mark presence and notify listeners.
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
// Builder overload: materializes the sub-message immediately.
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
// Merge into the existing mask only when one is already set and non-default;
// otherwise simply replace the stored value.
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
// Drop the presence bit, the value, and any nested builder.
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
// Handing out a mutable builder marks the field as set.
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
// Read-only view; never null.
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Volume resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily creates the nested single-field builder, transferring ownership of the current value.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
// The builder now owns the value; clear the plain reference.
updateMask_ = null;
}
return updateMaskBuilder_;
}
// Field 2 (volume) storage: plain value, or a lazily-created nested builder that owns it.
private com.google.cloud.netapp.v1.Volume volume_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.Volume,
com.google.cloud.netapp.v1.Volume.Builder,
com.google.cloud.netapp.v1.VolumeOrBuilder>
volumeBuilder_;
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the volume field is set.
*/
public boolean hasVolume() {
// Presence is tracked by bit 0x2.
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The volume.
*/
public com.google.cloud.netapp.v1.Volume getVolume() {
// Reads from the nested builder when one exists; never returns null.
if (volumeBuilder_ == null) {
return volume_ == null ? com.google.cloud.netapp.v1.Volume.getDefaultInstance() : volume_;
} else {
return volumeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setVolume(com.google.cloud.netapp.v1.Volume value) {
if (volumeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
volume_ = value;
} else {
volumeBuilder_.setMessage(value);
}
// Mark presence and notify listeners.
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setVolume(com.google.cloud.netapp.v1.Volume.Builder builderForValue) {
// Builder overload: materializes the sub-message immediately.
if (volumeBuilder_ == null) {
volume_ = builderForValue.build();
} else {
volumeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeVolume(com.google.cloud.netapp.v1.Volume value) {
if (volumeBuilder_ == null) {
// Merge into an existing non-default value; otherwise replace outright.
if (((bitField0_ & 0x00000002) != 0)
&& volume_ != null
&& volume_ != com.google.cloud.netapp.v1.Volume.getDefaultInstance()) {
getVolumeBuilder().mergeFrom(value);
} else {
volume_ = value;
}
} else {
volumeBuilder_.mergeFrom(value);
}
if (volume_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearVolume() {
// Drop the presence bit, the value, and any nested builder.
bitField0_ = (bitField0_ & ~0x00000002);
volume_ = null;
if (volumeBuilder_ != null) {
volumeBuilder_.dispose();
volumeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.Volume.Builder getVolumeBuilder() {
// Handing out a mutable builder marks the field as set.
bitField0_ |= 0x00000002;
onChanged();
return getVolumeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.VolumeOrBuilder getVolumeOrBuilder() {
// Read-only view; never null.
if (volumeBuilder_ != null) {
return volumeBuilder_.getMessageOrBuilder();
} else {
return volume_ == null ? com.google.cloud.netapp.v1.Volume.getDefaultInstance() : volume_;
}
}
/**
*
*
* <pre>
* Required. The volume being updated
* </pre>
*
* <code>.google.cloud.netapp.v1.Volume volume = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily creates the nested single-field builder, transferring ownership of the current value.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.Volume,
com.google.cloud.netapp.v1.Volume.Builder,
com.google.cloud.netapp.v1.VolumeOrBuilder>
getVolumeFieldBuilder() {
if (volumeBuilder_ == null) {
volumeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.Volume,
com.google.cloud.netapp.v1.Volume.Builder,
com.google.cloud.netapp.v1.VolumeOrBuilder>(
getVolume(), getParentForChildren(), isClean());
// The builder now owns the value; clear the plain reference.
volume_ = null;
}
return volumeBuilder_;
}
// Unknown-field handling delegates to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.UpdateVolumeRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.UpdateVolumeRequest)
// Shared all-fields-unset singleton used as the canonical default value.
private static final com.google.cloud.netapp.v1.UpdateVolumeRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.UpdateVolumeRequest();
}
public static com.google.cloud.netapp.v1.UpdateVolumeRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; partial results are attached to thrown exceptions so callers
// can inspect whatever was decoded before the failure.
private static final com.google.protobuf.Parser<UpdateVolumeRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateVolumeRequest>() {
@java.lang.Override
public UpdateVolumeRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so the parser API surface stays InvalidProtocolBufferException.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static and instance accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<UpdateVolumeRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateVolumeRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateVolumeRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/ignite-3 | 35,474 | modules/raft/src/test/java/org/apache/ignite/raft/jraft/core/ReplicatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.raft.jraft.core;
import static org.apache.ignite.internal.util.ArrayUtils.EMPTY_BYTE_BUFFER;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.same;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.internal.testframework.BaseIgniteAbstractTest;
import org.apache.ignite.raft.jraft.JRaftUtils;
import org.apache.ignite.raft.jraft.Status;
import org.apache.ignite.raft.jraft.closure.CatchUpClosure;
import org.apache.ignite.raft.jraft.core.Replicator.RequestType;
import org.apache.ignite.raft.jraft.entity.EnumOutter;
import org.apache.ignite.raft.jraft.entity.LogEntry;
import org.apache.ignite.raft.jraft.entity.LogId;
import org.apache.ignite.raft.jraft.entity.PeerId;
import org.apache.ignite.raft.jraft.entity.RaftOutter;
import org.apache.ignite.raft.jraft.error.RaftError;
import org.apache.ignite.raft.jraft.error.RaftException;
import org.apache.ignite.raft.jraft.option.NodeOptions;
import org.apache.ignite.raft.jraft.option.RaftOptions;
import org.apache.ignite.raft.jraft.option.ReplicatorOptions;
import org.apache.ignite.raft.jraft.rpc.AppendEntriesRequestBuilder;
import org.apache.ignite.raft.jraft.rpc.Message;
import org.apache.ignite.raft.jraft.rpc.RaftClientService;
import org.apache.ignite.raft.jraft.rpc.RpcRequests;
import org.apache.ignite.raft.jraft.rpc.RpcResponseClosureAdapter;
import org.apache.ignite.raft.jraft.storage.LogManager;
import org.apache.ignite.raft.jraft.storage.SnapshotStorage;
import org.apache.ignite.raft.jraft.storage.snapshot.SnapshotReader;
import org.apache.ignite.raft.jraft.util.ExecutorServiceHelper;
import org.apache.ignite.raft.jraft.util.ThreadId;
import org.apache.ignite.raft.jraft.util.Utils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.mockito.stubbing.Answer;
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
public class ReplicatorTest extends BaseIgniteAbstractTest {
private ThreadId id;
private final RaftOptions raftOptions = new RaftOptions();
private TimerManager timerManager;
@Mock
private RaftClientService rpcService;
@Mock
private NodeImpl node;
@Mock
private BallotBox ballotBox;
@Mock
private LogManager logManager;
@Mock
private SnapshotStorage snapshotStorage;
private ReplicatorOptions opts;
private final PeerId peerId = new PeerId("localhost", 8081);
private ExecutorService executor;
@BeforeEach
public void setup() {
// Wires a ReplicatorOptions around mocked collaborators (RPC service, log
// manager, ballot box, node) and starts a Replicator whose initial empty
// AppendEntries probe is stubbed to return a never-completing future.
this.timerManager = new TimerManager(5);
this.opts = new ReplicatorOptions();
this.opts.setRaftRpcService(this.rpcService);
this.opts.setPeerId(this.peerId);
this.opts.setBallotBox(this.ballotBox);
this.opts.setGroupId("test");
this.opts.setTerm(1);
this.opts.setServerId(new PeerId("localhost", 8082));
this.opts.setNode(this.node);
this.opts.setSnapshotStorage(this.snapshotStorage);
this.opts.setTimerManager(this.timerManager);
this.opts.setLogManager(this.logManager);
this.opts.setDynamicHeartBeatTimeoutMs(100);
this.opts.setElectionTimeoutMs(1000);
NodeOptions options = new NodeOptions();
executor = JRaftUtils.createExecutor("test-node", "test-executor-", Utils.cpus());
options.setCommonExecutor(executor);
// The mocked log ends at index 10, term 1; the peer connection always succeeds.
Mockito.when(this.logManager.getLastLogIndex()).thenReturn(10L);
Mockito.when(this.logManager.getTerm(10)).thenReturn(1L);
Mockito.when(this.rpcService.connect(this.peerId)).thenReturn(true);
Mockito.when(this.node.getNodeMetrics()).thenReturn(new NodeMetrics(true));
Mockito.when(this.node.getOptions()).thenReturn(options);
// mock send empty entries
mockSendEmptyEntries();
this.id = Replicator.start(this.opts, this.raftOptions);
}
// Stubs the non-heartbeat empty AppendEntries probe.
private void mockSendEmptyEntries() {
this.mockSendEmptyEntries(false);
}
// Stubs rpcService.appendEntries for the expected empty request so the call
// returns a future that never completes (keeping the RPC "in flight").
private void mockSendEmptyEntries(final boolean isHeartbeat) {
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest(isHeartbeat);
Mockito.when(this.rpcService.appendEntries(eq(this.peerId), eq(request), eq(-1), Mockito.any()))
.thenReturn(new CompletableFuture<>());
}
// Convenience overload for the non-heartbeat empty request.
private RpcRequests.AppendEntriesRequest createEmptyEntriesRequest() {
return this.createEmptyEntriesRequest(false);
}
// Builds the empty AppendEntries request the replicator is expected to send
// (prevLogIndex=10/term=1 from the mocked log). Heartbeats carry no data field.
private RpcRequests.AppendEntriesRequest createEmptyEntriesRequest(final boolean isHeartbeat) {
AppendEntriesRequestBuilder rb = raftOptions.getRaftMessagesFactory().appendEntriesRequest()
.groupId("test")
.serverId(new PeerId("localhost", 8082).toString())
.peerId(this.peerId.toString())
.term(1)
.prevLogIndex(10)
.prevLogTerm(1)
.committedIndex(0);
if (!isHeartbeat) {
rb.data(EMPTY_BYTE_BUFFER);
}
return rb.build();
}
@AfterEach
public void teardown() {
// Release timers and the shared executor so threads do not leak between tests.
this.timerManager.shutdown();
ExecutorServiceHelper.shutdownAndAwaitTermination(executor);
}
@Test
public void testStartDestroyJoin() throws Exception {
// Lifecycle smoke test: start leaves the replicator appending entries,
// destroy + join marks its thread id destroyed.
assertNotNull(this.id);
final Replicator r = getReplicator();
assertNotNull(r);
assertNotNull(r.getRpcInFly());
assertEquals(Replicator.RunningState.APPENDING_ENTRIES, r.statInfo.runningState);
assertSame(r.getOpts(), this.opts);
this.id.unlock();
assertEquals(0, Replicator.getNextIndex(this.id));
assertNotNull(r.getHeartbeatTimer());
r.destroy();
Replicator.join(this.id);
assertTrue(r.id.isDestroyed());
}
@Test
public void testMetricRemoveOnDestroy() {
// The replicator registers 7 metrics on start and must unregister all of
// them when destroyed.
assertNotNull(this.id);
final Replicator r = getReplicator();
assertNotNull(r);
assertSame(r.getOpts(), this.opts);
Set<String> metrics = this.opts.getNode().getNodeMetrics().getMetricRegistry().getNames();
assertEquals(7, metrics.size());
r.destroy();
metrics = this.opts.getNode().getNodeMetrics().getMetricRegistry().getNames();
assertEquals(0, metrics.size());
}
// Locks the thread id and returns the replicator it guards; callers must unlock.
private Replicator getReplicator() {
return (Replicator) this.id.lock();
}
@Test
public void testOnRpcReturnedRpcError() {
// An RPC-level error response must transition the replicator to BLOCKING.
testRpcReturnedError();
}
// Drives one failed (status -1) AppendEntries response through onRpcReturned
// and verifies the replicator blocks and schedules a block timer.
private Replicator testRpcReturnedError() {
final Replicator r = getReplicator();
assertNull(r.getBlockTimer());
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
.appendEntriesResponse()
.success(false)
.lastLogIndex(12)
.term(2)
.build();
this.id.unlock();
r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, new Status(-1, "test error"), request,
response, 0, 0, Utils.monotonicMs());
assertEquals(Replicator.RunningState.BLOCKING, r.statInfo.runningState);
assertNotNull(r.getBlockTimer());
return r;
}
@Test
public void testOnRpcReturnedRpcContinuousError() throws Exception {
// Repeated RPC errors: an error arriving while the block window is still
// open must reuse the existing block timer; once the window elapses, a
// subsequent error must schedule a fresh timer.
Replicator r = testRpcReturnedError();
ScheduledFuture<?> timer = r.getBlockTimer();
assertNotNull(timer);
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
.appendEntriesResponse()
.success(false)
.lastLogIndex(12)
.term(2)
.build();
r.getInflights().add(new Replicator.Inflight(RequestType.AppendEntries, r.getNextSendIndex(), 0, 0, 1, null));
r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, new Status(-1, "test error"), request,
response, 1, 1, Utils.monotonicMs());
assertEquals(Replicator.RunningState.BLOCKING, r.statInfo.runningState);
assertNotNull(r.getBlockTimer());
// the same timer
assertSame(timer, r.getBlockTimer());
// Let the block timeout elapse so the next error triggers rescheduling.
Thread.sleep(r.getOpts().getDynamicHeartBeatTimeoutMs() * 2);
r.getInflights().add(new Replicator.Inflight(RequestType.AppendEntries, r.getNextSendIndex(), 0, 0, 1, null));
r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, new Status(-1, "test error"), request,
response, 1, 2, Utils.monotonicMs());
assertEquals(Replicator.RunningState.BLOCKING, r.statInfo.runningState);
assertNotNull(r.getBlockTimer());
// a new timer instance, not the one scheduled before the timeout
assertNotSame(timer, r.getBlockTimer());
}
@Test
public void testOnRpcReturnedTermMismatch() {
// A response carrying a higher term (2 > 1) must step the node down via
// increaseTermTo and destroy the replicator.
final Replicator r = getReplicator();
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
.appendEntriesResponse()
.success(false)
.lastLogIndex(12)
.term(2)
.build();
this.id.unlock();
r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response, 0, 0,
Utils.monotonicMs());
Mockito.verify(this.node).increaseTermTo(
2,
new Status(RaftError.EHIGHERTERMRESPONSE, "Leader receives higher term heartbeat_response from peer:%s",
this.peerId));
assertTrue(r.id.isDestroyed());
}
@Test
public void testOnRpcReturnedMoreLogs() {
// Follower reports lastLogIndex=12 but rejects the probe: the replicator
// must back off its next index (11 -> 10) and retry with prevLogIndex=9.
final Replicator r = getReplicator();
assertEquals(11, r.getRealNextIndex());
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
.appendEntriesResponse()
.success(false)
.lastLogIndex(12)
.term(1)
.build();
this.id.unlock();
final Future<Message> rpcInFly = r.getRpcInFly();
assertNotNull(rpcInFly);
Mockito.when(this.logManager.getTerm(9)).thenReturn(1L);
// Expected retry request after stepping back one index.
final RpcRequests.AppendEntriesRequest newReq = raftOptions.getRaftMessagesFactory()
.appendEntriesRequest()
.groupId("test")
.serverId(new PeerId("localhost", 8082).toString())
.peerId(this.peerId.toString())
.term(1)
.prevLogIndex(9)
.data(EMPTY_BYTE_BUFFER)
.prevLogTerm(1)
.committedIndex(0)
.build();
Mockito.when(this.rpcService.appendEntries(eq(this.peerId), eq(newReq), eq(-1), Mockito.any()))
.thenReturn(new CompletableFuture<>());
r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response, 0, 0,
Utils.monotonicMs());
assertNotNull(r.getRpcInFly());
assertNotSame(r.getRpcInFly(), rpcInFly);
assertEquals(Replicator.RunningState.APPENDING_ENTRIES, r.statInfo.runningState);
this.id.unlock();
assertEquals(0, Replicator.getNextIndex(this.id));
assertEquals(10, r.getRealNextIndex());
}
@Test
public void testOnRpcReturnedLessLogs() {
// Follower's log is shorter (lastLogIndex=8): the replicator must jump its
// next index down to 9 and retry with prevLogIndex=8.
final Replicator r = getReplicator();
assertEquals(11, r.getRealNextIndex());
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
.appendEntriesResponse()
.success(false)
.lastLogIndex(8)
.term(1)
.build();
this.id.unlock();
final Future<Message> rpcInFly = r.getRpcInFly();
assertNotNull(rpcInFly);
Mockito.when(this.logManager.getTerm(8)).thenReturn(1L);
// Expected retry request anchored at the follower's last index.
final RpcRequests.AppendEntriesRequest newReq = raftOptions.getRaftMessagesFactory()
.appendEntriesRequest()
.groupId("test")
.serverId(new PeerId("localhost", 8082).toString())
.peerId(this.peerId.toString())
.term(1)
.prevLogIndex(8)
.prevLogTerm(1)
.data(EMPTY_BYTE_BUFFER)
.committedIndex(0)
.build();
Mockito.when(this.rpcService.appendEntries(eq(this.peerId), eq(newReq), eq(-1), Mockito.any()))
.thenReturn(new CompletableFuture<>());
r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response, 0, 0,
Utils.monotonicMs());
assertNotNull(r.getRpcInFly());
assertNotSame(r.getRpcInFly(), rpcInFly);
assertEquals(Replicator.RunningState.APPENDING_ENTRIES, r.statInfo.runningState);
this.id.unlock();
assertEquals(0, Replicator.getNextIndex(this.id));
assertEquals(9, r.getRealNextIndex());
}
@Test
public void testOnRpcReturnedWaitMoreEntries() throws Exception {
    // A successful response that reaches the end of the local log (index 10)
    // must switch the replicator to IDLE, register a wait on the log manager
    // (wait id 99 from the stub), and invoke the caught-up closure.
    final Replicator r = getReplicator();
    assertEquals(-1, r.getWaitId());
    final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
    final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
        .appendEntriesResponse()
        .success(true)
        .lastLogIndex(10)
        .term(1)
        .build();
    this.id.unlock();
    Mockito.when(this.logManager.wait(eq(10L), Mockito.any(), same(this.id))).thenReturn(99L);
    final CountDownLatch latch = new CountDownLatch(1);
    Replicator.waitForCaughtUp(this.id, 1, System.currentTimeMillis() + 5000, new CatchUpClosure() {
        @Override
        public void run(final Status status) {
            assertTrue(status.isOk());
            latch.countDown();
        }
    }, node.getOptions().getCommonExecutor());
    r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response, 0, 0,
        Utils.monotonicMs());
    assertEquals(Replicator.RunningState.IDLE, r.statInfo.runningState);
    this.id.unlock(); // TODO asch fix bad unlock IGNITE-14832
    assertEquals(11, Replicator.getNextIndex(this.id));
    assertEquals(99, r.getWaitId());
    // Bounded wait: fail the test instead of hanging the suite forever if the
    // catch-up closure is never invoked (the previous latch.await() could block indefinitely).
    assertTrue(latch.await(10, TimeUnit.SECONDS), "catch-up closure was not invoked");
}
@Test
public void testStop() {
// Replicator.stop must destroy the id and clear both the heartbeat timer
// and the in-flight RPC.
final Replicator r = getReplicator();
this.id.unlock();
assertNotNull(r.getHeartbeatTimer());
assertNotNull(r.getRpcInFly());
Replicator.stop(this.id);
assertTrue(r.id.isDestroyed());
assertNull(r.getHeartbeatTimer());
assertNull(r.getRpcInFly());
}
@Test
public void testSetErrorStop() {
// Setting an ESTOP error on the id behaves like an explicit stop.
final Replicator r = getReplicator();
this.id.unlock();
assertNotNull(r.getHeartbeatTimer());
assertNotNull(r.getRpcInFly());
this.id.setError(RaftError.ESTOP.getNumber());
this.id.unlock();
assertTrue(r.id.isDestroyed());
assertNull(r.getHeartbeatTimer());
assertNull(r.getRpcInFly());
}
@Test
public void testContinueSendingTimeout() throws Exception {
// After going IDLE, a timeout-triggered continueSending must issue a new
// (empty) probe, replacing the previous in-flight RPC.
testOnRpcReturnedWaitMoreEntries();
final Replicator r = getReplicator();
this.id.unlock();
mockSendEmptyEntries();
final Future<Message> rpcInFly = r.getRpcInFly();
assertNotNull(rpcInFly);
assertTrue(Replicator.continueSending(this.id, RaftError.ETIMEDOUT.getNumber()));
assertNotNull(r.getRpcInFly());
assertNotSame(rpcInFly, r.getRpcInFly());
}
@Test
public void testContinueSendingEntries() throws Exception {
    // Reach the IDLE/waiting state first, then expose ten new log entries
    // (indexes 11..20) and verify continueSending resumes replication with a
    // single batched AppendEntries request covering all of them.
    testOnRpcReturnedWaitMoreEntries();
    final Replicator r = getReplicator();
    this.id.unlock();
    mockSendEmptyEntries();
    final Future<Message> rpcInFly = r.getRpcInFly();
    assertNotNull(rpcInFly);
    final AppendEntriesRequestBuilder rb = raftOptions.getRaftMessagesFactory()
        .appendEntriesRequest()
        .groupId("test")
        .serverId(new PeerId("localhost", 8082).toString())
        .peerId(this.peerId.toString())
        .term(1)
        .prevLogIndex(10)
        .prevLogTerm(1)
        .committedIndex(0);
    // Stub entries 11..20 in the log manager and build the matching metadata
    // list the replicator is expected to send.
    int totalDataLen = 0;
    List<RaftOutter.EntryMeta> entries = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        totalDataLen += i;
        final LogEntry value = new LogEntry();
        value.setData(ByteBuffer.allocate(i));
        value.setType(EnumOutter.EntryType.ENTRY_TYPE_DATA);
        value.setId(new LogId(11 + i, 1));
        Mockito.when(this.logManager.getEntry(11 + i)).thenReturn(value);
        entries.add(raftOptions.getRaftMessagesFactory()
            .entryMeta()
            .term(1)
            .type(EnumOutter.EntryType.ENTRY_TYPE_DATA)
            .dataLen(i)
            .build());
    }
    rb.entriesList(entries);
    rb.data(ByteBuffer.wrap(new byte[totalDataLen]));
    final RpcRequests.AppendEntriesRequest request = rb.build();
    // Lambda instead of a raw-typed anonymous Answer<Future>; behavior is identical.
    Mockito.when(this.rpcService.appendEntries(eq(this.peerId), eq(request), eq(-1), Mockito.any()))
        .thenAnswer(invocation -> new CompletableFuture<>());
    assertEquals(11, r.statInfo.firstLogIndex);
    assertEquals(10, r.statInfo.lastLogIndex);
    Mockito.when(this.logManager.getTerm(20)).thenReturn(1L);
    assertTrue(Replicator.continueSending(this.id, 0));
    assertNotNull(r.getRpcInFly());
    assertNotSame(rpcInFly, r.getRpcInFly());
    assertEquals(11, r.statInfo.firstLogIndex);
    assertEquals(20, r.statInfo.lastLogIndex);
    assertEquals(0, r.getWaitId());
    assertEquals(Replicator.RunningState.IDLE, r.statInfo.runningState);
}
@Test
public void testSetErrorTimeout() throws Exception {
// An ETIMEDOUT error on the id must trigger a heartbeat probe (sent with a
// timeout of electionTimeout / 2).
final Replicator r = getReplicator();
this.id.unlock();
assertNull(r.getHeartbeatInFly());
final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest(true);
Mockito.when(
this.rpcService.appendEntries(eq(this.peerId), eq(request),
eq(this.opts.getElectionTimeoutMs() / 2), Mockito.any())).thenReturn(new CompletableFuture<>());
this.id.setError(RaftError.ETIMEDOUT.getNumber());
// Give the timer thread time to fire the heartbeat.
Thread.sleep(this.opts.getElectionTimeoutMs() + 1000);
assertNotNull(r.getHeartbeatInFly());
}
    /**
     * An RPC-level heartbeat failure (status code -1, null response) must not stop
     * heartbeating: a fresh heartbeat timer is scheduled in place of the old one.
     */
    @Test
    public void testOnHeartbeatReturnedRpcError() {
        final Replicator r = getReplicator();
        this.id.unlock();
        final ScheduledFuture<?> timer = r.getHeartbeatTimer();
        assertNotNull(timer);
        r.onHeartbeatReturned(this.id, new Status(-1, "test"), createEmptyEntriesRequest(), null,
            Utils.monotonicMs());
        // Timer was replaced, not cancelled.
        assertNotNull(r.getHeartbeatTimer());
        assertNotSame(timer, r.getHeartbeatTimer());
    }
    /**
     * A heartbeat response with success=false but the SAME term (1) is not a step-down
     * condition: the replicator simply reschedules the heartbeat timer.
     */
    @Test
    public void testOnHeartbeatReturnedOK() {
        final Replicator r = getReplicator();
        this.id.unlock();
        final ScheduledFuture<?> timer = r.getHeartbeatTimer();
        assertNotNull(timer);
        final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
            .appendEntriesResponse()
            .success(false)
            .lastLogIndex(10)
            .term(1)
            .build();
        r.onHeartbeatReturned(this.id, Status.OK(), createEmptyEntriesRequest(), response, Utils.monotonicMs());
        // A new timer instance is installed for the next heartbeat round.
        assertNotNull(r.getHeartbeatTimer());
        assertNotSame(timer, r.getHeartbeatTimer());
    }
    /**
     * A heartbeat response carrying a HIGHER term (2 > leader's 1) must make the node
     * step down: {@code increaseTermTo} is invoked with EHIGHERTERMRESPONSE and the
     * replicator id is destroyed.
     */
    @Test
    public void testOnHeartbeatReturnedTermMismatch() {
        final Replicator r = getReplicator();
        final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
        final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
            .appendEntriesResponse()
            .success(false)
            .lastLogIndex(12)
            .term(2)
            .build();
        this.id.unlock();
        r.onHeartbeatReturned(this.id, Status.OK(), request, response, Utils.monotonicMs());
        Mockito.verify(this.node).increaseTermTo(
            2,
            new Status(RaftError.EHIGHERTERMRESPONSE, "Leader receives higher term heartbeat_response from peer:%s",
                this.peerId));
        assertTrue(r.id.isDestroyed());
    }
@Test
public void testTransferLeadership() {
final Replicator r = getReplicator();
this.id.unlock();
assertEquals(0, r.getTimeoutNowIndex());
assertTrue(Replicator.transferLeadership(this.id, 11));
assertEquals(11, r.getTimeoutNowIndex());
assertNull(r.getTimeoutNowInFly());
}
    /**
     * Stopping a pending leadership transfer clears the recorded timeout-now index
     * without ever sending a TimeoutNow RPC.
     */
    @Test
    public void testStopTransferLeadership() {
        // Establishes a pending transfer at index 11.
        testTransferLeadership();
        Replicator.stopTransferLeadership(this.id);
        final Replicator r = getReplicator();
        this.id.unlock();
        assertEquals(0, r.getTimeoutNowIndex());
        assertNull(r.getTimeoutNowInFly());
    }
    /**
     * When the peer has already caught up ({@code setHasSucceeded}), a transfer request
     * sends the TimeoutNow RPC immediately instead of recording a pending index.
     */
    @Test
    public void testTransferLeadershipSendTimeoutNow() {
        final Replicator r = getReplicator();
        this.id.unlock();
        r.setHasSucceeded();
        assertEquals(0, r.getTimeoutNowIndex());
        assertNull(r.getTimeoutNowInFly());
        Mockito.when(
            this.rpcService.timeoutNow(eq(this.opts.getPeerId()), Mockito.any(), eq(-1),
                Mockito.any())).thenReturn(new CompletableFuture<>());
        assertTrue(Replicator.transferLeadership(this.id, 10));
        // Index stays 0 because the RPC went out right away.
        assertEquals(0, r.getTimeoutNowIndex());
        assertNotNull(r.getTimeoutNowInFly());
    }
    /**
     * {@code Replicator.sendHeartbeat} issues an empty AppendEntries RPC (timeout =
     * half the election timeout), records it as the in-fly heartbeat, and leaves the
     * replicator id lockable afterwards.
     */
    @Test
    public void testSendHeartbeat() {
        final Replicator r = getReplicator();
        this.id.unlock();
        assertNull(r.getHeartbeatInFly());
        final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest(true);
        Mockito.when(
            this.rpcService.appendEntries(eq(this.peerId), eq(request),
                eq(this.opts.getElectionTimeoutMs() / 2), Mockito.any())).thenAnswer(new Answer<Future>() {
                @Override public Future answer(InvocationOnMock invocation) throws Throwable {
                    return new CompletableFuture<>();
                }
            });
        // The closure only checks the status is OK; the RPC future itself never completes here.
        Replicator.sendHeartbeat(this.id, new RpcResponseClosureAdapter<RpcRequests.AppendEntriesResponse>() {
            @Override
            public void run(final Status status) {
                assertTrue(status.isOk());
            }
        }, node.getOptions().getCommonExecutor());
        assertNotNull(r.getHeartbeatInFly());
        // The id must still resolve back to the same replicator and be re-lockable.
        assertSame(r, this.id.lock());
        this.id.unlock();
    }
    /**
     * {@code sendTimeoutNowAndStop} fires a TimeoutNow RPC with the given timeout (10)
     * and stops the replicator: nothing is recorded as pending or in-fly afterwards.
     */
    @Test
    public void testSendTimeoutNowAndStop() {
        final Replicator r = getReplicator();
        this.id.unlock();
        r.setHasSucceeded();
        assertEquals(0, r.getTimeoutNowIndex());
        assertNull(r.getTimeoutNowInFly());
        assertTrue(Replicator.sendTimeoutNowAndStop(this.id, 10));
        assertEquals(0, r.getTimeoutNowIndex());
        assertNull(r.getTimeoutNowInFly());
        // The RPC must have been sent exactly once with timeout 10.
        Mockito.verify(this.rpcService).timeoutNow(eq(this.opts.getPeerId()), Mockito.any(),
            eq(10), Mockito.any());
    }
private RpcRequests.TimeoutNowRequest createTimeoutnowRequest() {
return raftOptions.getRaftMessagesFactory()
.timeoutNowRequest()
.term(this.opts.getTerm())
.groupId(this.opts.getGroupId())
.serverId(this.opts.getServerId().toString())
.peerId(this.opts.getPeerId().toString())
.timestamp(this.node.getOptions().getClock().now())
.build();
}
    /**
     * With stopAfterFinish=true, an RPC error on the TimeoutNow response (status -1,
     * null response) destroys the replicator id.
     */
    @Test
    public void testOnTimeoutNowReturnedRpcErrorAndStop() {
        final Replicator r = getReplicator();
        final RpcRequests.TimeoutNowRequest request = createTimeoutnowRequest();
        this.id.unlock();
        Replicator.onTimeoutNowReturned(this.id, new Status(-1, "test"), request, null, true);
        assertTrue(r.id.isDestroyed());
    }
    /**
     * If no snapshot reader can be opened (snapshotStorage.open() is unstubbed here and
     * returns null), installSnapshot must report an EIO error to the node.
     */
    @Test
    public void testInstallSnapshotNoReader() {
        final Replicator r = getReplicator();
        this.id.unlock();
        final Future<Message> rpcInFly = r.getRpcInFly();
        assertNotNull(rpcInFly);
        r.installSnapshot();
        final ArgumentCaptor<RaftException> errArg = ArgumentCaptor.forClass(RaftException.class);
        Mockito.verify(this.node).onError(errArg.capture());
        assertEquals(RaftError.EIO, errArg.getValue().getStatus().getRaftError());
        assertEquals("Fail to open snapshot", errArg.getValue().getStatus().getErrorMsg());
    }
    /**
     * Happy-path snapshot installation: with a reader whose copy URI and meta
     * (lastIncludedIndex=11, lastIncludedTerm=1) are stubbed, installSnapshot sends an
     * InstallSnapshot RPC, switches the running state to INSTALLING_SNAPSHOT, and
     * records the snapshot bounds in statInfo.
     */
    @Test
    public void testInstallSnapshot() {
        final Replicator r = getReplicator();
        this.id.unlock();
        final Future<Message> rpcInFly = r.getRpcInFly();
        assertNotNull(rpcInFly);
        final SnapshotReader reader = Mockito.mock(SnapshotReader.class);
        Mockito.when(this.snapshotStorage.open()).thenReturn(reader);
        final String uri = "remote://localhost:8081/99";
        Mockito.when(reader.generateURIForCopy()).thenReturn(uri);
        final RaftOutter.SnapshotMeta meta = raftOptions.getRaftMessagesFactory().snapshotMeta()
            .lastIncludedIndex(11)
            .lastIncludedTerm(1)
            .build();
        Mockito.when(reader.load()).thenReturn(meta);
        // statInfo starts with no snapshot recorded.
        assertEquals(0, r.statInfo.lastLogIncluded);
        assertEquals(0, r.statInfo.lastTermIncluded);
        // The exact request the replicator is expected to build from opts + meta + uri.
        final RpcRequests.InstallSnapshotRequest req = raftOptions.getRaftMessagesFactory()
            .installSnapshotRequest()
            .term(this.opts.getTerm())
            .groupId(this.opts.getGroupId())
            .serverId(this.opts.getServerId().toString())
            .peerId(this.opts.getPeerId().toString())
            .meta(meta)
            .uri(uri)
            .build();
        Mockito.when(
            this.rpcService.installSnapshot(eq(this.opts.getPeerId()), eq(req),
                Mockito.any())).thenReturn(new CompletableFuture<>());
        r.installSnapshot();
        // A new RPC replaced the previous in-fly one.
        assertNotNull(r.getRpcInFly());
        assertNotSame(r.getRpcInFly(), rpcInFly);
        assertEquals(Replicator.RunningState.INSTALLING_SNAPSHOT, r.statInfo.runningState);
        assertEquals(11, r.statInfo.lastLogIncluded);
        assertEquals(1, r.statInfo.lastTermIncluded);
    }
    /**
     * A TimeoutNow response with a higher term (12) makes the node step down via
     * {@code increaseTermTo} and destroys the replicator id.
     */
    @Test
    public void testOnTimeoutNowReturnedTermMismatch() {
        final Replicator r = getReplicator();
        this.id.unlock();
        final RpcRequests.TimeoutNowRequest request = createTimeoutnowRequest();
        final RpcRequests.TimeoutNowResponse response = raftOptions.getRaftMessagesFactory()
            .timeoutNowResponse()
            .success(false)
            .term(12)
            .build();
        // NOTE(review): this.id was already unlocked above — the second unlock looks
        // redundant; confirm ThreadId.unlock() tolerates being called when not held.
        this.id.unlock();
        Replicator.onTimeoutNowReturned(this.id, Status.OK(), request, response, false);
        Mockito.verify(this.node).increaseTermTo(
            12,
            new Status(RaftError.EHIGHERTERMRESPONSE, "Leader receives higher term timeout_now_response from peer:%s",
                this.peerId));
        assertTrue(r.id.isDestroyed());
    }
    /**
     * A successful InstallSnapshot response lets replication resume: the wait-id moves
     * from -1 to 0 and no block timer is scheduled.
     */
    @Test
    public void testOnInstallSnapshotReturned() {
        final Replicator r = getReplicator();
        this.id.unlock();
        assertNull(r.getBlockTimer());
        final RpcRequests.InstallSnapshotRequest request = createInstallSnapshotRequest();
        final RpcRequests.InstallSnapshotResponse response = raftOptions.getRaftMessagesFactory()
            .installSnapshotResponse()
            .success(true)
            .term(1)
            .build();
        assertEquals(-1, r.getWaitId());
        Mockito.when(this.logManager.getTerm(11)).thenReturn(1L);
        r.onRpcReturned(this.id, Replicator.RequestType.Snapshot, Status.OK(), request, response, 0, 0, -1);
        assertNull(r.getBlockTimer());
        assertEquals(0, r.getWaitId());
    }
    /**
     * An RPC-level error on the InstallSnapshot response (status -1) blocks the
     * replicator: a block timer is scheduled and the wait-id stays -1.
     */
    @Test
    public void testOnInstallSnapshotReturnedRpcError() {
        final Replicator r = getReplicator();
        this.id.unlock();
        assertNull(r.getBlockTimer());
        final RpcRequests.InstallSnapshotRequest request = createInstallSnapshotRequest();
        final RpcRequests.InstallSnapshotResponse response = raftOptions.getRaftMessagesFactory()
            .installSnapshotResponse()
            .success(true)
            .term(1)
            .build();
        assertEquals(-1, r.getWaitId());
        // lenient(): this stub is not reached on the error path.
        Mockito.lenient().when(this.logManager.getTerm(11)).thenReturn(1L);
        r.onRpcReturned(this.id, Replicator.RequestType.Snapshot, new Status(-1, "test"), request, response,
            0, 0, -1);
        assertNotNull(r.getBlockTimer());
        assertEquals(-1, r.getWaitId());
    }
    /**
     * A well-formed InstallSnapshot response with success=false also blocks the
     * replicator (block timer scheduled, wait-id unchanged at -1).
     */
    @Test
    public void testOnInstallSnapshotReturnedFailure() {
        final Replicator r = getReplicator();
        this.id.unlock();
        assertNull(r.getBlockTimer());
        final RpcRequests.InstallSnapshotRequest request = createInstallSnapshotRequest();
        final RpcRequests.InstallSnapshotResponse response = raftOptions.getRaftMessagesFactory()
            .installSnapshotResponse()
            .success(false)
            .term(1)
            .build();
        assertEquals(-1, r.getWaitId());
        // lenient(): this stub is not reached on the failure path.
        Mockito.lenient().when(this.logManager.getTerm(11)).thenReturn(1L);
        r.onRpcReturned(this.id, Replicator.RequestType.Snapshot, Status.OK(), request, response, 0, 0, -1);
        assertNotNull(r.getBlockTimer());
        assertEquals(-1, r.getWaitId());
    }
    /**
     * Responses arriving out of sequence are queued: sequence 1 is held in the pending
     * set until sequence 0 arrives, after which both are processed and the replication
     * window advances (next index 11, required next sequence 1).
     */
    @Test
    public void testOnRpcReturnedOutOfOrder() {
        final Replicator r = getReplicator();
        assertEquals(-1, r.getWaitId());
        final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
        final RpcRequests.AppendEntriesResponse response = raftOptions.getRaftMessagesFactory()
            .appendEntriesResponse()
            .success(true)
            .lastLogIndex(10)
            .term(1)
            .build();
        assertNull(r.getBlockTimer());
        this.id.unlock();
        assertTrue(r.getPendingResponses().isEmpty());
        // Sequence 1 first: must be parked, not processed.
        r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response, 1, 0,
            Utils.monotonicMs());
        assertEquals(1, r.getPendingResponses().size());
        // Sequence 0 arrives: both responses drain in order.
        r.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response, 0, 0,
            Utils.monotonicMs());
        assertTrue(r.getPendingResponses().isEmpty());
        assertEquals(0, r.getWaitId());
        assertEquals(11, r.getRealNextIndex());
        assertEquals(1, r.getRequiredNextSeq());
    }
private void mockSendEntries(@SuppressWarnings("SameParameterValue") final int n) {
final RpcRequests.AppendEntriesRequest request = createEntriesRequest(n);
Mockito.lenient().when(this.rpcService.appendEntries(eq(this.peerId), eq(request), eq(-1), Mockito.any()))
.thenReturn(new CompletableFuture<>());
}
    /**
     * Builds the AppendEntries request the replicator is expected to send for a batch
     * of {@code n} entries following log position 10/term 1. Entry {@code i} gets log
     * index {@code 11 + i} and a payload of {@code i} bytes; the log manager mock is
     * stubbed to serve each entry and its term.
     */
    private RpcRequests.AppendEntriesRequest createEntriesRequest(final int n) {
        final AppendEntriesRequestBuilder rb = raftOptions.getRaftMessagesFactory()
            .appendEntriesRequest()
            .groupId("test")
            .serverId(new PeerId("localhost", 8082).toString())
            .peerId(this.peerId.toString())
            .term(1)
            .prevLogIndex(10)
            .prevLogTerm(1)
            .committedIndex(0);
        List<RaftOutter.EntryMeta> entries = new ArrayList<>();
        for (int i = 0; i < n; i++) {
            final LogEntry log = new LogEntry(EnumOutter.EntryType.ENTRY_TYPE_DATA);
            log.setData(ByteBuffer.wrap(new byte[i]));
            log.setId(new LogId(i + 11, 1));
            Mockito.when(this.logManager.getEntry(i + 11)).thenReturn(log);
            Mockito.when(this.logManager.getTerm(i + 11)).thenReturn(1L);
            entries.add(raftOptions.getRaftMessagesFactory()
                .entryMeta()
                .dataLen(i)
                .term(1)
                .type(EnumOutter.EntryType.ENTRY_TYPE_DATA)
                .build());
        }
        rb.entriesList(entries);
        return rb.build();
    }
    /**
     * Next-send index bookkeeping: -1 while requests are in flight (presumably the
     * initial empty-entries probe — see getReplicator); 11 after resetting inflights;
     * 14 after a 3-entry batch has been sent.
     */
    @Test
    public void testGetNextSendIndex() {
        final Replicator r = getReplicator();
        assertEquals(-1, r.getNextSendIndex());
        r.resetInflights();
        assertEquals(11, r.getNextSendIndex());
        mockSendEntries(3);
        r.sendEntries();
        assertEquals(14, r.getNextSendIndex());
    }
private RpcRequests.InstallSnapshotRequest createInstallSnapshotRequest() {
final String uri = "remote://localhost:8081/99";
final RaftOutter.SnapshotMeta meta = raftOptions.getRaftMessagesFactory()
.snapshotMeta()
.lastIncludedIndex(11)
.lastIncludedTerm(1)
.build();
return raftOptions.getRaftMessagesFactory().installSnapshotRequest()
.term(this.opts.getTerm())
.groupId(this.opts.getGroupId())
.serverId(this.opts.getServerId().toString())
.peerId(this.opts.getPeerId().toString())
.meta(meta)
.uri(uri)
.build();
}
}
// googleapis/google-api-java-client-services | 35449 | clients/google-api-services-compute/v1/2.0.0/com/google/api/services/compute/model/Firewall.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents a Firewall Rule resource.
*
* Firewall rules allow or deny ingress traffic to, and egress traffic from your instances. For more
 * information, read Firewall rules.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Firewall extends com.google.api.client.json.GenericJson {
/**
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a permitted connection.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Allowed> allowed;
static {
// hack to force ProGuard to consider Allowed used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Allowed.class);
}
/**
   * [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a denied connection.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Denied> denied;
static {
// hack to force ProGuard to consider Denied used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Denied.class);
}
/**
* An optional description of this resource. Provide this field when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* If destination ranges are specified, the firewall rule applies only to traffic that has
   * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4
* and IPv6 are supported.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> destinationRanges;
/**
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
* is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String direction;
/**
* Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
* enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
* rule will be enabled.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean disabled;
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* This field denotes the logging options for a particular firewall rule. If logging is enabled,
* logs will be exported to Cloud Logging.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private FirewallLogConfig logConfig;
/**
* Name of the resource; provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
* a lowercase letter, and all following characters (except for the last character) must be a
* dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* URL of the network resource for this firewall rule. If not specified when creating a firewall
* rule, the default network is used:
*
* global/networks/default
*
* If you choose to specify this field, you can specify the network as a full or partial URL. For
* example, the following are all valid URLs: -
* https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
* projects/myproject/global/networks/my-network - global/networks/default
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String network;
/**
* Input only. [Input Only] Additional params passed with the request, but not persisted as part
* of resource payload.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private FirewallParams params;
/**
* Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
* value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
* Lower values indicate higher priority. For example, a rule with priority `0` has higher
* precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
* have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
* avoid conflicts with the implied rules, use a priority number less than `65535`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer priority;
/**
* [Output Only] Server-defined URL for the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP
   * address in these ranges. These ranges must be expressed in CIDR format. One or both of
   * sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
   * that has a source IP address within sourceRanges OR a source IP from a resource with a matching
   * tag listed in the sourceTags field. The connection does not need to match both fields for the
* rule to apply. Both IPv4 and IPv6 are supported.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceRanges;
/**
* If source service accounts are specified, the firewall rules apply only to traffic originating
* from an instance with a service account in this list. Source service accounts cannot be used to
* control traffic to an instance's external IP address because service accounts are associated
   * with an instance, not an IP address. sourceRanges can be set at the same time
   * as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
   * address within the sourceRanges OR a source IP that belongs to an instance with service account
   * listed in sourceServiceAccount. The connection does not need to match both fields for the
   * firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
* targetTags.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceServiceAccounts;
/**
* If source tags are specified, the firewall rule applies only to traffic with source IPs that
* match the primary network interfaces of VM instances that have the tag and are in the same VPC
* network. Source tags cannot be used to control traffic to an instance's external IP address, it
* only applies to traffic between instances in the same virtual network. Because tags are
* associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
* set. If both fields are set, the firewall applies to traffic that has a source IP address
* within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
* field. The connection does not need to match both fields for the firewall to apply.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceTags;
/**
* A list of service accounts indicating sets of instances located in the network that may make
   * network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
   * time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
* the firewall rule applies to all instances on the specified network.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> targetServiceAccounts;
/**
* A list of tags that controls which instances the firewall rule applies to. If targetTags are
* specified, then the firewall rule applies only to instances in the VPC network that have one of
* those tags. If no targetTags are specified, the firewall rule applies to all instances on the
* specified network.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> targetTags;
/**
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a permitted connection.
* @return value or {@code null} for none
*/
public java.util.List<Allowed> getAllowed() {
return allowed;
}
/**
* The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a permitted connection.
* @param allowed allowed or {@code null} for none
*/
public Firewall setAllowed(java.util.List<Allowed> allowed) {
this.allowed = allowed;
return this;
}
/**
   * [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
   * [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Firewall setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a denied connection.
* @return value or {@code null} for none
*/
public java.util.List<Denied> getDenied() {
return denied;
}
/**
* The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
* range tuple that describes a denied connection.
* @param denied denied or {@code null} for none
*/
public Firewall setDenied(java.util.List<Denied> denied) {
this.denied = denied;
return this;
}
/**
* An optional description of this resource. Provide this field when you create the resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this field when you create the resource.
* @param description description or {@code null} for none
*/
public Firewall setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* If destination ranges are specified, the firewall rule applies only to traffic that has
   * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4
* and IPv6 are supported.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getDestinationRanges() {
return destinationRanges;
}
/**
* If destination ranges are specified, the firewall rule applies only to traffic that has
   * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4
* and IPv6 are supported.
* @param destinationRanges destinationRanges or {@code null} for none
*/
public Firewall setDestinationRanges(java.util.List<java.lang.String> destinationRanges) {
this.destinationRanges = destinationRanges;
return this;
}
/**
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
* is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* @return value or {@code null} for none
*/
public java.lang.String getDirection() {
return direction;
}
/**
* Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
* is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
* @param direction direction or {@code null} for none
*/
public Firewall setDirection(java.lang.String direction) {
this.direction = direction;
return this;
}
/**
* Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
* enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
* rule will be enabled.
* @return value or {@code null} for none
*/
public java.lang.Boolean getDisabled() {
return disabled;
}
/**
* Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
* enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
* rule will be enabled.
* @param disabled disabled or {@code null} for none
*/
public Firewall setDisabled(java.lang.Boolean disabled) {
this.disabled = disabled;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Firewall setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#firewall for firewall rules.
* @param kind kind or {@code null} for none
*/
public Firewall setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* This field denotes the logging options for a particular firewall rule. If logging is enabled,
* logs will be exported to Cloud Logging.
* @return value or {@code null} for none
*/
public FirewallLogConfig getLogConfig() {
return logConfig;
}
/**
* This field denotes the logging options for a particular firewall rule. If logging is enabled,
* logs will be exported to Cloud Logging.
* @param logConfig logConfig or {@code null} for none
*/
public Firewall setLogConfig(FirewallLogConfig logConfig) {
this.logConfig = logConfig;
return this;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
* a lowercase letter, and all following characters (except for the last character) must be a
* dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
* a lowercase letter, and all following characters (except for the last character) must be a
* dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
* @param name name or {@code null} for none
*/
public Firewall setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* URL of the network resource for this firewall rule. If not specified when creating a firewall
* rule, the default network is used:
*
* global/networks/default
*
* If you choose to specify this field, you can specify the network as a full or partial URL. For
* example, the following are all valid URLs: -
* https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
* projects/myproject/global/networks/my-network - global/networks/default
* @return value or {@code null} for none
*/
public java.lang.String getNetwork() {
return network;
}
/**
* URL of the network resource for this firewall rule. If not specified when creating a firewall
* rule, the default network is used:
*
* global/networks/default
*
* If you choose to specify this field, you can specify the network as a full or partial URL. For
* example, the following are all valid URLs: -
* https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
* projects/myproject/global/networks/my-network - global/networks/default
* @param network network or {@code null} for none
*/
public Firewall setNetwork(java.lang.String network) {
this.network = network;
return this;
}
/**
* Input only. [Input Only] Additional params passed with the request, but not persisted as part
* of resource payload.
* @return value or {@code null} for none
*/
public FirewallParams getParams() {
return params;
}
/**
* Input only. [Input Only] Additional params passed with the request, but not persisted as part
* of resource payload.
* @param params params or {@code null} for none
*/
public Firewall setParams(FirewallParams params) {
this.params = params;
return this;
}
// Generated accessor pair for the "priority" field.
/**
* Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
* value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
* Lower values indicate higher priority. For example, a rule with priority `0` has higher
* precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
* have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
* avoid conflicts with the implied rules, use a priority number less than `65535`.
* @return value or {@code null} for none
*/
public java.lang.Integer getPriority() {
return priority;
}
/**
* Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
* value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
* Lower values indicate higher priority. For example, a rule with priority `0` has higher
* precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
* have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
* avoid conflicts with the implied rules, use a priority number less than `65535`.
* @param priority priority or {@code null} for none
*/
public Firewall setPriority(java.lang.Integer priority) {
this.priority = priority;
return this; // fluent setter: returns this Firewall for call chaining
}
// Generated accessor pair for the server-populated "selfLink" field.
/**
* [Output Only] Server-defined URL for the resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined URL for the resource.
* @param selfLink selfLink or {@code null} for none
*/
public Firewall setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this; // fluent setter: returns this Firewall for call chaining
}
// Generated accessor pair for the "sourceRanges" field.
/**
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP
* address in these ranges. These ranges must be expressed in CIDR format. One or both of
* sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
* that has a source IP address within sourceRanges OR a source IP from a resource with a matching
* tag listed in the sourceTags field. The connection does not need to match both fields for the
* rule to apply. Both IPv4 and IPv6 are supported.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getSourceRanges() {
return sourceRanges;
}
/**
* If source ranges are specified, the firewall rule applies only to traffic that has a source IP
* address in these ranges. These ranges must be expressed in CIDR format. One or both of
* sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
* that has a source IP address within sourceRanges OR a source IP from a resource with a matching
* tag listed in the sourceTags field. The connection does not need to match both fields for the
* rule to apply. Both IPv4 and IPv6 are supported.
* @param sourceRanges sourceRanges or {@code null} for none
*/
public Firewall setSourceRanges(java.util.List<java.lang.String> sourceRanges) {
this.sourceRanges = sourceRanges;
return this; // fluent setter: returns this Firewall for call chaining
}
// Generated accessor pair for the "sourceServiceAccounts" field.
/**
* If source service accounts are specified, the firewall rules apply only to traffic originating
* from an instance with a service account in this list. Source service accounts cannot be used to
* control traffic to an instance's external IP address because service accounts are associated
* with an instance, not an IP address. sourceRanges can be set at the same time
* as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
* address within the sourceRanges OR a source IP that belongs to an instance with service account
* listed in sourceServiceAccount. The connection does not need to match both fields for the
* firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
* targetTags.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getSourceServiceAccounts() {
return sourceServiceAccounts;
}
/**
* If source service accounts are specified, the firewall rules apply only to traffic originating
* from an instance with a service account in this list. Source service accounts cannot be used to
* control traffic to an instance's external IP address because service accounts are associated
* with an instance, not an IP address. sourceRanges can be set at the same time
* as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
* address within the sourceRanges OR a source IP that belongs to an instance with service account
* listed in sourceServiceAccount. The connection does not need to match both fields for the
* firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
* targetTags.
* @param sourceServiceAccounts sourceServiceAccounts or {@code null} for none
*/
public Firewall setSourceServiceAccounts(java.util.List<java.lang.String> sourceServiceAccounts) {
this.sourceServiceAccounts = sourceServiceAccounts;
return this; // fluent setter: returns this Firewall for call chaining
}
// Generated accessor pair for the "sourceTags" field.
/**
* If source tags are specified, the firewall rule applies only to traffic with source IPs that
* match the primary network interfaces of VM instances that have the tag and are in the same VPC
* network. Source tags cannot be used to control traffic to an instance's external IP address, it
* only applies to traffic between instances in the same virtual network. Because tags are
* associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
* set. If both fields are set, the firewall applies to traffic that has a source IP address
* within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
* field. The connection does not need to match both fields for the firewall to apply.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getSourceTags() {
return sourceTags;
}
/**
* If source tags are specified, the firewall rule applies only to traffic with source IPs that
* match the primary network interfaces of VM instances that have the tag and are in the same VPC
* network. Source tags cannot be used to control traffic to an instance's external IP address, it
* only applies to traffic between instances in the same virtual network. Because tags are
* associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
* set. If both fields are set, the firewall applies to traffic that has a source IP address
* within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
* field. The connection does not need to match both fields for the firewall to apply.
* @param sourceTags sourceTags or {@code null} for none
*/
public Firewall setSourceTags(java.util.List<java.lang.String> sourceTags) {
this.sourceTags = sourceTags;
return this; // fluent setter: returns this Firewall for call chaining
}
// Generated accessor pair for the "targetServiceAccounts" field.
/**
* A list of service accounts indicating sets of instances located in the network that may make
* network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
* time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
* the firewall rule applies to all instances on the specified network.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getTargetServiceAccounts() {
return targetServiceAccounts;
}
/**
* A list of service accounts indicating sets of instances located in the network that may make
* network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
* time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
* the firewall rule applies to all instances on the specified network.
* @param targetServiceAccounts targetServiceAccounts or {@code null} for none
*/
public Firewall setTargetServiceAccounts(java.util.List<java.lang.String> targetServiceAccounts) {
this.targetServiceAccounts = targetServiceAccounts;
return this; // fluent setter: returns this Firewall for call chaining
}
// Generated accessor pair for the "targetTags" field.
/**
* A list of tags that controls which instances the firewall rule applies to. If targetTags are
* specified, then the firewall rule applies only to instances in the VPC network that have one of
* those tags. If no targetTags are specified, the firewall rule applies to all instances on the
* specified network.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getTargetTags() {
return targetTags;
}
/**
* A list of tags that controls which instances the firewall rule applies to. If targetTags are
* specified, then the firewall rule applies only to instances in the VPC network that have one of
* those tags. If no targetTags are specified, the firewall rule applies to all instances on the
* specified network.
* @param targetTags targetTags or {@code null} for none
*/
public Firewall setTargetTags(java.util.List<java.lang.String> targetTags) {
this.targetTags = targetTags;
return this; // fluent setter: returns this Firewall for call chaining
}
// Covariant overrides of GenericJson#set and GenericJson#clone so chained
// calls keep the Firewall static type instead of widening to GenericJson.
@Override
public Firewall set(String fieldName, Object value) {
return (Firewall) super.set(fieldName, value);
}
@Override
public Firewall clone() {
return (Firewall) super.clone();
}
/**
* Model definition for FirewallAllowed: one ALLOW entry of a firewall rule,
* pairing an IP protocol with an optional list of ports.
*/
public static final class Allowed extends com.google.api.client.json.GenericJson {
/**
* The IP protocol to which this rule applies. The protocol type is required when creating a
* firewall rule. This value can either be one of the following well known protocol strings (tcp,
* udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key("IPProtocol")
private java.lang.String iPProtocol;
/**
* An optional list of ports to which this rule applies. This field is only applicable for the UDP
* or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
* applies to connections through any port.
*
* Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> ports;
/**
* The IP protocol to which this rule applies. The protocol type is required when creating a
* firewall rule. This value can either be one of the following well known protocol strings (tcp,
* udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
* @return value or {@code null} for none
*/
public java.lang.String getIPProtocol() {
return iPProtocol;
}
/**
* The IP protocol to which this rule applies. The protocol type is required when creating a
* firewall rule. This value can either be one of the following well known protocol strings (tcp,
* udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
* @param iPProtocol iPProtocol or {@code null} for none
*/
public Allowed setIPProtocol(java.lang.String iPProtocol) {
this.iPProtocol = iPProtocol;
return this; // fluent setter: returns this Allowed for call chaining
}
/**
* An optional list of ports to which this rule applies. This field is only applicable for the UDP
* or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
* applies to connections through any port.
*
* Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getPorts() {
return ports;
}
/**
* An optional list of ports to which this rule applies. This field is only applicable for the UDP
* or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
* applies to connections through any port.
*
* Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
* @param ports ports or {@code null} for none
*/
public Allowed setPorts(java.util.List<java.lang.String> ports) {
this.ports = ports;
return this; // fluent setter: returns this Allowed for call chaining
}
@Override
public Allowed set(String fieldName, Object value) {
return (Allowed) super.set(fieldName, value);
}
@Override
public Allowed clone() {
return (Allowed) super.clone();
}
}
/**
* Model definition for FirewallDenied: one DENY entry of a firewall rule,
* pairing an IP protocol with an optional list of ports (mirror of Allowed).
*/
public static final class Denied extends com.google.api.client.json.GenericJson {
/**
* The IP protocol to which this rule applies. The protocol type is required when creating a
* firewall rule. This value can either be one of the following well known protocol strings (tcp,
* udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key("IPProtocol")
private java.lang.String iPProtocol;
/**
* An optional list of ports to which this rule applies. This field is only applicable for the UDP
* or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
* applies to connections through any port.
*
* Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> ports;
/**
* The IP protocol to which this rule applies. The protocol type is required when creating a
* firewall rule. This value can either be one of the following well known protocol strings (tcp,
* udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
* @return value or {@code null} for none
*/
public java.lang.String getIPProtocol() {
return iPProtocol;
}
/**
* The IP protocol to which this rule applies. The protocol type is required when creating a
* firewall rule. This value can either be one of the following well known protocol strings (tcp,
* udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
* @param iPProtocol iPProtocol or {@code null} for none
*/
public Denied setIPProtocol(java.lang.String iPProtocol) {
this.iPProtocol = iPProtocol;
return this; // fluent setter: returns this Denied for call chaining
}
/**
* An optional list of ports to which this rule applies. This field is only applicable for the UDP
* or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
* applies to connections through any port.
*
* Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getPorts() {
return ports;
}
/**
* An optional list of ports to which this rule applies. This field is only applicable for the UDP
* or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
* applies to connections through any port.
*
* Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
* @param ports ports or {@code null} for none
*/
public Denied setPorts(java.util.List<java.lang.String> ports) {
this.ports = ports;
return this; // fluent setter: returns this Denied for call chaining
}
@Override
public Denied set(String fieldName, Object value) {
return (Denied) super.set(fieldName, value);
}
@Override
public Denied clone() {
return (Denied) super.clone();
}
}
}
|
googleapis/google-cloud-java | 35,214 | java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ReceivePacketsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/streaming_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Response message from ReceivePackets.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ReceivePacketsResponse}
*/
public final class ReceivePacketsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ReceivePacketsResponse)
ReceivePacketsResponseOrBuilder {
// protoc-generated construction and descriptor plumbing for ReceivePacketsResponse.
private static final long serialVersionUID = 0L;
// Use ReceivePacketsResponse.newBuilder() to construct.
private ReceivePacketsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ReceivePacketsResponse() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ReceivePacketsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.StreamingServiceProto
.internal_static_google_cloud_visionai_v1_ReceivePacketsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.StreamingServiceProto
.internal_static_google_cloud_visionai_v1_ReceivePacketsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ReceivePacketsResponse.class,
com.google.cloud.visionai.v1.ReceivePacketsResponse.Builder.class);
}
// Backing state for the "response" oneof: responseCase_ holds the set field's
// number (0 = unset) and response_ holds the corresponding message object.
private int responseCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object response_;
// Enum mirror of the "response" oneof; values match the proto field numbers.
public enum ResponseCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
PACKET(1),
CONTROL(3),
RESPONSE_NOT_SET(0);
private final int value;
private ResponseCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ResponseCase valueOf(int value) {
return forNumber(value);
}
public static ResponseCase forNumber(int value) {
switch (value) {
case 1:
return PACKET;
case 3:
return CONTROL;
case 0:
return RESPONSE_NOT_SET;
default:
return null; // unknown field number
}
}
public int getNumber() {
return this.value;
}
};
public ResponseCase getResponseCase() {
return ResponseCase.forNumber(responseCase_);
}
// Accessors for the "packet" arm (field 1) of the response oneof.
public static final int PACKET_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* A genuine data payload originating from the sender.
* </pre>
*
* <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
*
* @return Whether the packet field is set.
*/
@java.lang.Override
public boolean hasPacket() {
return responseCase_ == 1;
}
/**
*
*
* <pre>
* A genuine data payload originating from the sender.
* </pre>
*
* <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
*
* @return The packet.
*/
@java.lang.Override
public com.google.cloud.visionai.v1.Packet getPacket() {
if (responseCase_ == 1) {
return (com.google.cloud.visionai.v1.Packet) response_;
}
// Oneof holds a different (or no) field: return the default instance, never null.
return com.google.cloud.visionai.v1.Packet.getDefaultInstance();
}
/**
*
*
* <pre>
* A genuine data payload originating from the sender.
* </pre>
*
* <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
*/
@java.lang.Override
public com.google.cloud.visionai.v1.PacketOrBuilder getPacketOrBuilder() {
if (responseCase_ == 1) {
return (com.google.cloud.visionai.v1.Packet) response_;
}
return com.google.cloud.visionai.v1.Packet.getDefaultInstance();
}
// Accessors for the "control" arm (field 3) of the response oneof.
public static final int CONTROL_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* A control message from the server.
* </pre>
*
* <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
*
* @return Whether the control field is set.
*/
@java.lang.Override
public boolean hasControl() {
return responseCase_ == 3;
}
/**
*
*
* <pre>
* A control message from the server.
* </pre>
*
* <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
*
* @return The control.
*/
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsControlResponse getControl() {
if (responseCase_ == 3) {
return (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_;
}
// Oneof holds a different (or no) field: return the default instance, never null.
return com.google.cloud.visionai.v1.ReceivePacketsControlResponse.getDefaultInstance();
}
/**
*
*
* <pre>
* A control message from the server.
* </pre>
*
* <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
*/
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsControlResponseOrBuilder getControlOrBuilder() {
if (responseCase_ == 3) {
return (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_;
}
return com.google.cloud.visionai.v1.ReceivePacketsControlResponse.getDefaultInstance();
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message has no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Wire serialization: only the currently-set oneof arm is written.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (responseCase_ == 1) {
output.writeMessage(1, (com.google.cloud.visionai.v1.Packet) response_);
}
if (responseCase_ == 3) {
output.writeMessage(
3, (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize; // -1 sentinel means "not yet computed"
if (size != -1) return size;
size = 0;
if (responseCase_ == 1) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
1, (com.google.cloud.visionai.v1.Packet) response_);
}
if (responseCase_ == 3) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
3, (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality/hash over the oneof case, the set arm, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.visionai.v1.ReceivePacketsResponse)) {
return super.equals(obj);
}
com.google.cloud.visionai.v1.ReceivePacketsResponse other =
(com.google.cloud.visionai.v1.ReceivePacketsResponse) obj;
if (!getResponseCase().equals(other.getResponseCase())) return false;
switch (responseCase_) {
case 1:
if (!getPacket().equals(other.getPacket())) return false;
break;
case 3:
if (!getControl().equals(other.getControl())) return false;
break;
case 0:
default:
// RESPONSE_NOT_SET: nothing further to compare.
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
switch (responseCase_) {
case 1:
hash = (37 * hash) + PACKET_FIELD_NUMBER;
hash = (53 * hash) + getPacket().hashCode();
break;
case 3:
hash = (37 * hash) + CONTROL_FIELD_NUMBER;
hash = (53 * hash) + getControl().hashCode();
break;
case 0:
default:
// RESPONSE_NOT_SET contributes nothing beyond the descriptor hash.
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protobuf parse entry points; all delegate to the generated PARSER.
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first (for streams of messages).
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ReceivePacketsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.visionai.v1.ReceivePacketsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; otherwise pre-populate from this message.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message from ReceivePackets.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ReceivePacketsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ReceivePacketsResponse)
com.google.cloud.visionai.v1.ReceivePacketsResponseOrBuilder {
// Builder descriptor plumbing, construction, and reset/build entry points.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.StreamingServiceProto
.internal_static_google_cloud_visionai_v1_ReceivePacketsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.StreamingServiceProto
.internal_static_google_cloud_visionai_v1_ReceivePacketsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ReceivePacketsResponse.class,
com.google.cloud.visionai.v1.ReceivePacketsResponse.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.ReceivePacketsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
// Reset both oneof arm builders and the oneof selector itself.
if (packetBuilder_ != null) {
packetBuilder_.clear();
}
if (controlBuilder_ != null) {
controlBuilder_.clear();
}
responseCase_ = 0;
response_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.StreamingServiceProto
.internal_static_google_cloud_visionai_v1_ReceivePacketsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsResponse getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.ReceivePacketsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsResponse build() {
com.google.cloud.visionai.v1.ReceivePacketsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsResponse buildPartial() {
com.google.cloud.visionai.v1.ReceivePacketsResponse result =
new com.google.cloud.visionai.v1.ReceivePacketsResponse(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
// No non-oneof fields exist in this message, so this is a generated no-op.
private void buildPartial0(com.google.cloud.visionai.v1.ReceivePacketsResponse result) {
int from_bitField0_ = bitField0_;
}
// Copy the oneof selector and payload; if a nested builder is active for the
// selected arm, materialize it so the result holds an immutable message.
private void buildPartialOneofs(com.google.cloud.visionai.v1.ReceivePacketsResponse result) {
result.responseCase_ = responseCase_;
result.response_ = this.response_;
if (responseCase_ == 1 && packetBuilder_ != null) {
result.response_ = packetBuilder_.build();
}
if (responseCase_ == 3 && controlBuilder_ != null) {
result.response_ = controlBuilder_.build();
}
}
// Covariant delegating overrides so chained calls keep the Builder type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.ReceivePacketsResponse) {
return mergeFrom((com.google.cloud.visionai.v1.ReceivePacketsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: adopt whichever oneof arm is set in "other" (last writer wins).
public Builder mergeFrom(com.google.cloud.visionai.v1.ReceivePacketsResponse other) {
if (other == com.google.cloud.visionai.v1.ReceivePacketsResponse.getDefaultInstance())
return this;
switch (other.getResponseCase()) {
case PACKET:
{
mergePacket(other.getPacket());
break;
}
case CONTROL:
{
mergeControl(other.getControl());
break;
}
case RESPONSE_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge loop; tag 10 = field 1 (packet), tag 26 = field 3 (control),
// both length-delimited (wire type 2).
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getPacketFieldBuilder().getBuilder(), extensionRegistry);
responseCase_ = 1;
break;
} // case 10
case 26:
{
input.readMessage(getControlFieldBuilder().getBuilder(), extensionRegistry);
responseCase_ = 3;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder-side oneof state (same encoding as the message: 0 = unset).
private int responseCase_ = 0;
private java.lang.Object response_;
public ResponseCase getResponseCase() {
return ResponseCase.forNumber(responseCase_);
}
public Builder clearResponse() {
responseCase_ = 0;
response_ = null;
onChanged();
return this;
}
private int bitField0_;
// Lazily-created nested builder for the "packet" arm; null until first use.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.visionai.v1.Packet,
com.google.cloud.visionai.v1.Packet.Builder,
com.google.cloud.visionai.v1.PacketOrBuilder>
packetBuilder_;
/**
*
*
* <pre>
* A genuine data payload originating from the sender.
* </pre>
*
* <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
*
* @return Whether the packet field is set.
*/
@java.lang.Override
public boolean hasPacket() {
return responseCase_ == 1;
}
/**
*
*
* <pre>
* A genuine data payload originating from the sender.
* </pre>
*
* <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
*
* @return The packet.
*/
@java.lang.Override
public com.google.cloud.visionai.v1.Packet getPacket() {
// Read from response_ directly unless a nested builder has taken ownership.
if (packetBuilder_ == null) {
if (responseCase_ == 1) {
return (com.google.cloud.visionai.v1.Packet) response_;
}
return com.google.cloud.visionai.v1.Packet.getDefaultInstance();
} else {
if (responseCase_ == 1) {
return packetBuilder_.getMessage();
}
return com.google.cloud.visionai.v1.Packet.getDefaultInstance();
}
}
/**
 * Sets the {@code packet} field, switching the oneof to case 1.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
public Builder setPacket(com.google.cloud.visionai.v1.Packet value) {
  if (packetBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    response_ = value;
    onChanged();
  } else {
    packetBuilder_.setMessage(value);
  }
  responseCase_ = 1;
  return this;
}

/**
 * Sets the {@code packet} field from a builder, switching the oneof to case 1.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
public Builder setPacket(com.google.cloud.visionai.v1.Packet.Builder builderForValue) {
  if (packetBuilder_ == null) {
    response_ = builderForValue.build();
    onChanged();
  } else {
    packetBuilder_.setMessage(builderForValue.build());
  }
  responseCase_ = 1;
  return this;
}
/**
 * Merges {@code value} into the current {@code packet}. If the oneof holds a different
 * case (or the default instance), {@code value} replaces it outright.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
public Builder mergePacket(com.google.cloud.visionai.v1.Packet value) {
  if (packetBuilder_ == null) {
    if (responseCase_ == 1
        && response_ != com.google.cloud.visionai.v1.Packet.getDefaultInstance()) {
      // Already holding a packet: merge field-by-field via a temporary builder.
      response_ =
          com.google.cloud.visionai.v1.Packet.newBuilder(
                  (com.google.cloud.visionai.v1.Packet) response_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      response_ = value;
    }
    onChanged();
  } else {
    if (responseCase_ == 1) {
      packetBuilder_.mergeFrom(value);
    } else {
      packetBuilder_.setMessage(value);
    }
  }
  responseCase_ = 1;
  return this;
}
/**
 * Clears the {@code packet} field; the oneof case is reset only when it was case 1.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
public Builder clearPacket() {
  if (packetBuilder_ == null) {
    if (responseCase_ == 1) {
      responseCase_ = 0;
      response_ = null;
      onChanged();
    }
  } else {
    if (responseCase_ == 1) {
      responseCase_ = 0;
      response_ = null;
    }
    // clear() on the field builder also fires onChanged via the builder parent.
    packetBuilder_.clear();
  }
  return this;
}
/**
 * Returns a mutable builder for the {@code packet} field, switching the oneof to case 1.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
public com.google.cloud.visionai.v1.Packet.Builder getPacketBuilder() {
  return getPacketFieldBuilder().getBuilder();
}

/**
 * Read-only view of the {@code packet} field; the default instance when case 1 is unset.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
@java.lang.Override
public com.google.cloud.visionai.v1.PacketOrBuilder getPacketOrBuilder() {
  if ((responseCase_ == 1) && (packetBuilder_ != null)) {
    return packetBuilder_.getMessageOrBuilder();
  } else {
    if (responseCase_ == 1) {
      return (com.google.cloud.visionai.v1.Packet) response_;
    }
    return com.google.cloud.visionai.v1.Packet.getDefaultInstance();
  }
}
/**
 * Lazily creates the single-field builder for {@code packet}, seeding it with the current
 * oneof value (or the default instance) and switching the oneof to case 1.
 *
 * <code>.google.cloud.visionai.v1.Packet packet = 1;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.visionai.v1.Packet,
        com.google.cloud.visionai.v1.Packet.Builder,
        com.google.cloud.visionai.v1.PacketOrBuilder>
    getPacketFieldBuilder() {
  if (packetBuilder_ == null) {
    if (!(responseCase_ == 1)) {
      response_ = com.google.cloud.visionai.v1.Packet.getDefaultInstance();
    }
    packetBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.visionai.v1.Packet,
            com.google.cloud.visionai.v1.Packet.Builder,
            com.google.cloud.visionai.v1.PacketOrBuilder>(
            (com.google.cloud.visionai.v1.Packet) response_, getParentForChildren(), isClean());
    response_ = null; // ownership of the value transfers to the field builder
  }
  responseCase_ = 1;
  onChanged();
  return packetBuilder_;
}
// Lazily-created single-field builder for the {@code control} oneof case; null until first use.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.visionai.v1.ReceivePacketsControlResponse,
        com.google.cloud.visionai.v1.ReceivePacketsControlResponse.Builder,
        com.google.cloud.visionai.v1.ReceivePacketsControlResponseOrBuilder>
    controlBuilder_;

/**
 * A control message from the server.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 *
 * @return Whether the control field (oneof case 3) is set.
 */
@java.lang.Override
public boolean hasControl() {
  return responseCase_ == 3;
}
/**
 * A control message from the server.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 *
 * @return The control, or the default instance when oneof case 3 is not set.
 */
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsControlResponse getControl() {
  if (controlBuilder_ == null) {
    // No field builder yet: the value (if any) lives directly in response_.
    if (responseCase_ == 3) {
      return (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_;
    }
    return com.google.cloud.visionai.v1.ReceivePacketsControlResponse.getDefaultInstance();
  } else {
    if (responseCase_ == 3) {
      return controlBuilder_.getMessage();
    }
    return com.google.cloud.visionai.v1.ReceivePacketsControlResponse.getDefaultInstance();
  }
}
/**
 * Sets the {@code control} field, switching the oneof to case 3.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
public Builder setControl(com.google.cloud.visionai.v1.ReceivePacketsControlResponse value) {
  if (controlBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    response_ = value;
    onChanged();
  } else {
    controlBuilder_.setMessage(value);
  }
  responseCase_ = 3;
  return this;
}

/**
 * Sets the {@code control} field from a builder, switching the oneof to case 3.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
public Builder setControl(
    com.google.cloud.visionai.v1.ReceivePacketsControlResponse.Builder builderForValue) {
  if (controlBuilder_ == null) {
    response_ = builderForValue.build();
    onChanged();
  } else {
    controlBuilder_.setMessage(builderForValue.build());
  }
  responseCase_ = 3;
  return this;
}
/**
 * Merges {@code value} into the current {@code control}. If the oneof holds a different
 * case (or the default instance), {@code value} replaces it outright.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
public Builder mergeControl(com.google.cloud.visionai.v1.ReceivePacketsControlResponse value) {
  if (controlBuilder_ == null) {
    if (responseCase_ == 3
        && response_
            != com.google.cloud.visionai.v1.ReceivePacketsControlResponse
                .getDefaultInstance()) {
      // Already holding a control message: merge field-by-field via a temporary builder.
      response_ =
          com.google.cloud.visionai.v1.ReceivePacketsControlResponse.newBuilder(
                  (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      response_ = value;
    }
    onChanged();
  } else {
    if (responseCase_ == 3) {
      controlBuilder_.mergeFrom(value);
    } else {
      controlBuilder_.setMessage(value);
    }
  }
  responseCase_ = 3;
  return this;
}
/**
 * Clears the {@code control} field; the oneof case is reset only when it was case 3.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
public Builder clearControl() {
  if (controlBuilder_ == null) {
    if (responseCase_ == 3) {
      responseCase_ = 0;
      response_ = null;
      onChanged();
    }
  } else {
    if (responseCase_ == 3) {
      responseCase_ = 0;
      response_ = null;
    }
    // clear() on the field builder also fires onChanged via the builder parent.
    controlBuilder_.clear();
  }
  return this;
}
/**
 * Returns a mutable builder for the {@code control} field, switching the oneof to case 3.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
public com.google.cloud.visionai.v1.ReceivePacketsControlResponse.Builder getControlBuilder() {
  return getControlFieldBuilder().getBuilder();
}

/**
 * Read-only view of the {@code control} field; the default instance when case 3 is unset.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsControlResponseOrBuilder
    getControlOrBuilder() {
  if ((responseCase_ == 3) && (controlBuilder_ != null)) {
    return controlBuilder_.getMessageOrBuilder();
  } else {
    if (responseCase_ == 3) {
      return (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_;
    }
    return com.google.cloud.visionai.v1.ReceivePacketsControlResponse.getDefaultInstance();
  }
}
/**
 * Lazily creates the single-field builder for {@code control}, seeding it with the current
 * oneof value (or the default instance) and switching the oneof to case 3.
 *
 * <code>.google.cloud.visionai.v1.ReceivePacketsControlResponse control = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.visionai.v1.ReceivePacketsControlResponse,
        com.google.cloud.visionai.v1.ReceivePacketsControlResponse.Builder,
        com.google.cloud.visionai.v1.ReceivePacketsControlResponseOrBuilder>
    getControlFieldBuilder() {
  if (controlBuilder_ == null) {
    if (!(responseCase_ == 3)) {
      response_ =
          com.google.cloud.visionai.v1.ReceivePacketsControlResponse.getDefaultInstance();
    }
    controlBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.visionai.v1.ReceivePacketsControlResponse,
            com.google.cloud.visionai.v1.ReceivePacketsControlResponse.Builder,
            com.google.cloud.visionai.v1.ReceivePacketsControlResponseOrBuilder>(
            (com.google.cloud.visionai.v1.ReceivePacketsControlResponse) response_,
            getParentForChildren(),
            isClean());
    response_ = null; // ownership of the value transfers to the field builder
  }
  responseCase_ = 3;
  onChanged();
  return controlBuilder_;
}
// Delegating overrides emitted by protoc to pin the return type to this Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ReceivePacketsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ReceivePacketsResponse)
// Singleton default instance, created eagerly at class-load time.
private static final com.google.cloud.visionai.v1.ReceivePacketsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ReceivePacketsResponse();
}

/** Returns the immutable singleton default instance of this message type. */
public static com.google.cloud.visionai.v1.ReceivePacketsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: delegates to Builder.mergeFrom and attaches the partially-parsed
// message to any exception so callers can inspect what was read before the failure.
private static final com.google.protobuf.Parser<ReceivePacketsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ReceivePacketsResponse>() {
      @java.lang.Override
      public ReceivePacketsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the parser API surfaces a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Static accessor for the message parser. */
public static com.google.protobuf.Parser<ReceivePacketsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ReceivePacketsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.visionai.v1.ReceivePacketsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v2/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v2;
/**
*
*
* <pre>
* Response message for BatchCreateResourceValueConfigs
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse}
*/
public final class BatchCreateResourceValueConfigsResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse)
BatchCreateResourceValueConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;

// Use BatchCreateResourceValueConfigsResponse.newBuilder() to construct.
private BatchCreateResourceValueConfigsResponse(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor: initializes the repeated field to an immutable empty list.
private BatchCreateResourceValueConfigsResponse() {
  resourceValueConfigs_ = java.util.Collections.emptyList();
}

// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new BatchCreateResourceValueConfigsResponse();
}
/** Returns the proto descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
      .internal_static_google_cloud_securitycenter_v2_BatchCreateResourceValueConfigsResponse_descriptor;
}

// Maps descriptor fields to the generated accessors for reflection-based access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
      .internal_static_google_cloud_securitycenter_v2_BatchCreateResourceValueConfigsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse.class,
          com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse.Builder
              .class);
}
public static final int RESOURCE_VALUE_CONFIGS_FIELD_NUMBER = 1;

// Backing list; made unmodifiable by the Builder before the message is exposed.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.securitycenter.v2.ResourceValueConfig>
    resourceValueConfigs_;

/**
 * The resource value configs created
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.securitycenter.v2.ResourceValueConfig>
    getResourceValueConfigsList() {
  return resourceValueConfigs_;
}
/**
 * The resource value configs created
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @return read-only view of the list typed as OrBuilder elements.
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder>
    getResourceValueConfigsOrBuilderList() {
  return resourceValueConfigs_;
}

/**
 * The resource value configs created
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @return number of elements in the repeated field.
 */
@java.lang.Override
public int getResourceValueConfigsCount() {
  return resourceValueConfigs_.size();
}
/**
 * The resource value configs created
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @param index zero-based position in the repeated field.
 */
@java.lang.Override
public com.google.cloud.securitycenter.v2.ResourceValueConfig getResourceValueConfigs(int index) {
  return resourceValueConfigs_.get(index);
}

/**
 * The resource value configs created
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @param index zero-based position in the repeated field.
 */
@java.lang.Override
public com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder
    getResourceValueConfigsOrBuilder(int index) {
  return resourceValueConfigs_.get(index);
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the repeated field (tag 1) followed by any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < resourceValueConfigs_.size(); i++) {
    output.writeMessage(1, resourceValueConfigs_.get(i));
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size of this message.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < resourceValueConfigs_.size(); i++) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(1, resourceValueConfigs_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality: same type, same repeated-field contents, same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj
      instanceof com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse other =
      (com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse) obj;

  if (!getResourceValueConfigsList().equals(other.getResourceValueConfigsList())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash consistent with equals(); field-number constants keep it stable per field.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getResourceValueConfigsCount() > 0) {
    hash = (37 * hash) + RESOURCE_VALUE_CONFIGS_FIELD_NUMBER;
    hash = (53 * hash) + getResourceValueConfigsList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points; all delegate to PARSER / GeneratedMessageV3
// helpers, with and without an ExtensionRegistry, for every supported input source.
public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a new builder for this message type. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a builder pre-populated with the fields of {@code prototype}. */
public static Builder newBuilder(
    com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

// The default instance skips the merge — an empty builder is equivalent and cheaper.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response message for BatchCreateResourceValueConfigs
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse)
com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponseOrBuilder {
/** Returns the proto descriptor for the message type built by this builder. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
      .internal_static_google_cloud_securitycenter_v2_BatchCreateResourceValueConfigsResponse_descriptor;
}

// Maps descriptor fields to the generated accessors for reflection-based access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
      .internal_static_google_cloud_securitycenter_v2_BatchCreateResourceValueConfigsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse.class,
          com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse.Builder
              .class);
}
// Construct using
// com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

/** Resets this builder to the default (empty) state. */
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (resourceValueConfigsBuilder_ == null) {
    resourceValueConfigs_ = java.util.Collections.emptyList();
  } else {
    resourceValueConfigs_ = null;
    resourceValueConfigsBuilder_.clear();
  }
  // Clear the "repeated field is mutable/owned" bit.
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
      .internal_static_google_cloud_securitycenter_v2_BatchCreateResourceValueConfigsResponse_descriptor;
}

// Returns the immutable default instance of the built message type.
@java.lang.Override
public com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    getDefaultInstanceForType() {
  return com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
      .getDefaultInstance();
}
/** Builds the message, throwing if any required field is unset. */
@java.lang.Override
public com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse build() {
  com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse result =
      buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

/** Builds the message without enforcing required-field initialization. */
@java.lang.Override
public com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    buildPartial() {
  com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse result =
      new com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Moves the repeated field into the result, freezing the backing list (and clearing the
// ownership bit) when the builder holds it directly rather than via a field builder.
private void buildPartialRepeatedFields(
    com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse result) {
  if (resourceValueConfigsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      resourceValueConfigs_ = java.util.Collections.unmodifiableList(resourceValueConfigs_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.resourceValueConfigs_ = resourceValueConfigs_;
  } else {
    result.resourceValueConfigs_ = resourceValueConfigsBuilder_.build();
  }
}
/**
 * Copies singular (non-repeated) fields tracked by {@code bitField0_} into {@code result}.
 * This message declares no singular fields, so there is nothing to copy; the method is
 * kept for structural parity with other generated builders.
 *
 * <p>NOTE(review): the generated body declared an unused local
 * ({@code int from_bitField0_ = bitField0_;}); removed as dead code.
 */
private void buildPartial0(
    com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse result) {}
// The overrides below delegate unchanged to GeneratedMessageV3.Builder; protoc emits them
// to narrow the return type to this concrete Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible; otherwise falls back to the
// reflection-based merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other
      instanceof com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse) {
    return mergeFrom(
        (com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
/**
 * Merges {@code other} into this builder: its repeated configs are appended to ours.
 * When this builder is still empty it adopts the other message's (immutable) list by
 * reference instead of copying.
 */
public Builder mergeFrom(
    com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse other) {
  if (other
      == com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
          .getDefaultInstance()) return this;
  if (resourceValueConfigsBuilder_ == null) {
    if (!other.resourceValueConfigs_.isEmpty()) {
      if (resourceValueConfigs_.isEmpty()) {
        // Adopt by reference; the mutable-ownership bit stays clear.
        resourceValueConfigs_ = other.resourceValueConfigs_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureResourceValueConfigsIsMutable();
        resourceValueConfigs_.addAll(other.resourceValueConfigs_);
      }
      onChanged();
    }
  } else {
    if (!other.resourceValueConfigs_.isEmpty()) {
      if (resourceValueConfigsBuilder_.isEmpty()) {
        // Dispose the empty field builder and adopt the list; re-create the builder
        // eagerly only when the runtime is configured to always use field builders.
        resourceValueConfigsBuilder_.dispose();
        resourceValueConfigsBuilder_ = null;
        resourceValueConfigs_ = other.resourceValueConfigs_;
        bitField0_ = (bitField0_ & ~0x00000001);
        resourceValueConfigsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getResourceValueConfigsFieldBuilder()
                : null;
      } else {
        resourceValueConfigsBuilder_.addAllMessages(other.resourceValueConfigs_);
      }
    }
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// No required fields in this message, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
/**
 * Parses fields from the wire, appending each tag-1 sub-message to the repeated field and
 * routing unrecognized tags to the unknown-field set. Stops at EOF (tag 0) or an
 * end-group tag.
 */
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            // Field 1, wire type 2: one ResourceValueConfig message.
            com.google.cloud.securitycenter.v2.ResourceValueConfig m =
                input.readMessage(
                    com.google.cloud.securitycenter.v2.ResourceValueConfig.parser(),
                    extensionRegistry);
            if (resourceValueConfigsBuilder_ == null) {
              ensureResourceValueConfigsIsMutable();
              resourceValueConfigs_.add(m);
            } else {
              resourceValueConfigsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;

// Backing list for the repeated field; bit 0x1 of bitField0_ tracks whether this builder
// owns a mutable copy.
private java.util.List<com.google.cloud.securitycenter.v2.ResourceValueConfig>
    resourceValueConfigs_ = java.util.Collections.emptyList();

// Copy-on-write: replace a shared/immutable list with a private ArrayList before mutating.
private void ensureResourceValueConfigsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    resourceValueConfigs_ =
        new java.util.ArrayList<com.google.cloud.securitycenter.v2.ResourceValueConfig>(
            resourceValueConfigs_);
    bitField0_ |= 0x00000001;
  }
}
// Lazily-created repeated-field builder; once non-null it owns the field's contents.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.securitycenter.v2.ResourceValueConfig,
        com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder,
        com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder>
    resourceValueConfigsBuilder_;

/**
 * The resource value configs created
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @return unmodifiable view of the current list.
 */
public java.util.List<com.google.cloud.securitycenter.v2.ResourceValueConfig>
    getResourceValueConfigsList() {
  if (resourceValueConfigsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(resourceValueConfigs_);
  } else {
    return resourceValueConfigsBuilder_.getMessageList();
  }
}
/**
 * The resource value configs created
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @return number of elements in the repeated field.
 */
public int getResourceValueConfigsCount() {
  if (resourceValueConfigsBuilder_ == null) {
    return resourceValueConfigs_.size();
  } else {
    return resourceValueConfigsBuilder_.getCount();
  }
}

/**
 * The resource value configs created
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 *
 * @param index zero-based position in the repeated field.
 */
public com.google.cloud.securitycenter.v2.ResourceValueConfig getResourceValueConfigs(
    int index) {
  if (resourceValueConfigsBuilder_ == null) {
    return resourceValueConfigs_.get(index);
  } else {
    return resourceValueConfigsBuilder_.getMessage(index);
  }
}
/**
 * Replaces the element at {@code index} with {@code value}.
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 */
public Builder setResourceValueConfigs(
    int index, com.google.cloud.securitycenter.v2.ResourceValueConfig value) {
  if (resourceValueConfigsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.set(index, value);
    onChanged();
  } else {
    resourceValueConfigsBuilder_.setMessage(index, value);
  }
  return this;
}

/**
 * Replaces the element at {@code index} with the built form of {@code builderForValue}.
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 */
public Builder setResourceValueConfigs(
    int index, com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder builderForValue) {
  if (resourceValueConfigsBuilder_ == null) {
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.set(index, builderForValue.build());
    onChanged();
  } else {
    resourceValueConfigsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends {@code value} to the repeated field.
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 */
public Builder addResourceValueConfigs(
    com.google.cloud.securitycenter.v2.ResourceValueConfig value) {
  if (resourceValueConfigsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.add(value);
    onChanged();
  } else {
    resourceValueConfigsBuilder_.addMessage(value);
  }
  return this;
}

/**
 * Inserts {@code value} at {@code index}, shifting later elements right.
 *
 * <code>
 * repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;
 * </code>
 */
public Builder addResourceValueConfigs(
    int index, com.google.cloud.securitycenter.v2.ResourceValueConfig value) {
  if (resourceValueConfigsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.add(index, value);
    onChanged();
  } else {
    resourceValueConfigsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * Appends the message built from {@code builderForValue} to the repeated field.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param builderForValue builder whose {@code build()} result is appended
 * @return this builder, for chaining
 */
public Builder addResourceValueConfigs(
    com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder builderForValue) {
  if (resourceValueConfigsBuilder_ == null) {
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.add(builderForValue.build());
    onChanged();
  } else {
    resourceValueConfigsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * Inserts the message built from {@code builderForValue} at {@code index}.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param index insertion position within the repeated field
 * @param builderForValue builder whose {@code build()} result is inserted
 * @return this builder, for chaining
 */
public Builder addResourceValueConfigs(
    int index, com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder builderForValue) {
  if (resourceValueConfigsBuilder_ == null) {
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.add(index, builderForValue.build());
    onChanged();
  } else {
    resourceValueConfigsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * Appends all of {@code values} to the repeated field, in iteration order.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param values messages to append; the iterable and its elements must not be null
 * @return this builder, for chaining
 */
public Builder addAllResourceValueConfigs(
    java.lang.Iterable<? extends com.google.cloud.securitycenter.v2.ResourceValueConfig>
        values) {
  if (resourceValueConfigsBuilder_ == null) {
    ensureResourceValueConfigsIsMutable();
    // AbstractMessageLite.Builder.addAll null-checks each element.
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, resourceValueConfigs_);
    onChanged();
  } else {
    resourceValueConfigsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Removes all elements from the repeated field and clears its presence bit.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @return this builder, for chaining
 */
public Builder clearResourceValueConfigs() {
  if (resourceValueConfigsBuilder_ == null) {
    resourceValueConfigs_ = java.util.Collections.emptyList();
    // Clear the "field is mutable/populated" bit for this repeated field.
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    resourceValueConfigsBuilder_.clear();
  }
  return this;
}
/**
 * Removes the element at {@code index} from the repeated field.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param index position of the element to remove
 * @return this builder, for chaining
 */
public Builder removeResourceValueConfigs(int index) {
  if (resourceValueConfigsBuilder_ == null) {
    ensureResourceValueConfigsIsMutable();
    resourceValueConfigs_.remove(index);
    onChanged();
  } else {
    resourceValueConfigsBuilder_.remove(index);
  }
  return this;
}
/**
 * Returns a mutable builder for the element at {@code index}. Forces creation
 * of the nested field builder (the plain list view is discarded).
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param index position of the element
 * @return builder backing the element at {@code index}
 */
public com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder
    getResourceValueConfigsBuilder(int index) {
  return getResourceValueConfigsFieldBuilder().getBuilder(index);
}
/**
 * Returns a read view of the element at {@code index} — either the message
 * itself (list mode) or its live builder (builder mode).
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param index position of the element
 * @return message or builder view of the element
 */
public com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder
    getResourceValueConfigsOrBuilder(int index) {
  if (resourceValueConfigsBuilder_ == null) {
    return resourceValueConfigs_.get(index);
  } else {
    return resourceValueConfigsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * Returns an unmodifiable view of the repeated field as message-or-builder
 * elements.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @return unmodifiable list view of the field's elements
 */
public java.util.List<? extends com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder>
    getResourceValueConfigsOrBuilderList() {
  if (resourceValueConfigsBuilder_ != null) {
    return resourceValueConfigsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(resourceValueConfigs_);
  }
}
/**
 * Appends a new default-initialized element and returns its builder.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @return builder for the newly appended element
 */
public com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder
    addResourceValueConfigsBuilder() {
  return getResourceValueConfigsFieldBuilder()
      .addBuilder(com.google.cloud.securitycenter.v2.ResourceValueConfig.getDefaultInstance());
}
/**
 * Inserts a new default-initialized element at {@code index} and returns its
 * builder.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @param index insertion position within the repeated field
 * @return builder for the newly inserted element
 */
public com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder
    addResourceValueConfigsBuilder(int index) {
  return getResourceValueConfigsFieldBuilder()
      .addBuilder(
          index, com.google.cloud.securitycenter.v2.ResourceValueConfig.getDefaultInstance());
}
/**
 * Returns the full list of element builders, forcing creation of the nested
 * field builder.
 *
 * <code>repeated .google.cloud.securitycenter.v2.ResourceValueConfig resource_value_configs = 1;</code>
 *
 * @return list of builders, one per element
 */
public java.util.List<com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder>
    getResourceValueConfigsBuilderList() {
  return getResourceValueConfigsFieldBuilder().getBuilderList();
}
// Lazily creates the nested RepeatedFieldBuilderV3 on first use. After
// creation the plain list is handed off to the builder and nulled out, so all
// subsequent accesses must go through the builder.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.securitycenter.v2.ResourceValueConfig,
        com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder,
        com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder>
    getResourceValueConfigsFieldBuilder() {
  if (resourceValueConfigsBuilder_ == null) {
    resourceValueConfigsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.securitycenter.v2.ResourceValueConfig,
            com.google.cloud.securitycenter.v2.ResourceValueConfig.Builder,
            com.google.cloud.securitycenter.v2.ResourceValueConfigOrBuilder>(
            resourceValueConfigs_,
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    resourceValueConfigs_ = null;
  }
  return resourceValueConfigsBuilder_;
}
/** Replaces the unknown-field set verbatim; delegates to the generated superclass. */
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
/** Merges the given unknown fields into this builder; delegates to the generated superclass. */
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse)
// Singleton default (all-fields-unset) instance, created eagerly at class
// initialization and shared by every call to getDefaultInstance().
private static final com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any parse failure the partially built message is
// attached to the thrown InvalidProtocolBufferException so callers can
// inspect what was decoded before the error.
private static final com.google.protobuf.Parser<BatchCreateResourceValueConfigsResponse> PARSER =
    new com.google.protobuf.AbstractParser<BatchCreateResourceValueConfigsResponse>() {
      @java.lang.Override
      public BatchCreateResourceValueConfigsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared wire-format parser for this message type. */
public static com.google.protobuf.Parser<BatchCreateResourceValueConfigsResponse> parser() {
  return PARSER;
}
/** Instance accessor for the shared parser, required by the Message contract. */
@java.lang.Override
public com.google.protobuf.Parser<BatchCreateResourceValueConfigsResponse> getParserForType() {
  return PARSER;
}
/** Instance accessor for the shared default instance, required by the Message contract. */
@java.lang.Override
public com.google.cloud.securitycenter.v2.BatchCreateResourceValueConfigsResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hive | 35,436 | ql/src/java/org/apache/hadoop/hive/ql/exec/vector/mapjoin/VectorMapJoinCommonOperator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.vector.mapjoin;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map.Entry;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.HashTableLoaderFactory;
import org.apache.hadoop.hive.ql.exec.HashTableLoader;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnMapping;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnOutputMapping;
import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping;
import org.apache.hadoop.hive.ql.exec.vector.VectorCopyRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorDeserializeRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationOperator;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.optimized.VectorMapJoinOptimizedCreateHashTable;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinHashTable;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.hashtable.VectorMapJoinTableContainer;
import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastHashTableLoader;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.VectorDesc;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableImplementationType;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKeyType;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.HashTableKind;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc.VectorMapJoinVariation;
import org.apache.hadoop.hive.ql.plan.VectorMapJoinInfo;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead;
import org.apache.hadoop.hive.serde2.lazybinary.fast.LazyBinaryDeserializeRead;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
/**
* This class is common operator class for native vectorized map join.
*
* It contain common initialization logic.
*
* It is used by both inner and outer joins.
*/
public abstract class VectorMapJoinCommonOperator extends MapJoinOperator
implements VectorizationOperator, VectorizationContextRegion {
private static final long serialVersionUID = 1L;
//------------------------------------------------------------------------------------------------
private static final String CLASS_NAME = VectorMapJoinCommonOperator.class.getName();
private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
protected abstract String getLoggingPrefix();
// For debug tracing: information about the map or reduce task, operator, operator class, etc.
protected transient String loggingPrefix;
/**
 * Lazily resolves and caches the logging prefix. Resolved on first use rather
 * than in the constructor (the field is transient and not restored by Kryo).
 *
 * @param className class name used to seed the prefix on first call
 * @return the cached logging prefix
 */
protected String getLoggingPrefix(String className) {
  if (loggingPrefix == null) {
    initLoggingPrefix(className);
  }
  return loggingPrefix;
}
/** Initializes the cached logging prefix; currently just the class name. */
protected void initLoggingPrefix(String className) {
  loggingPrefix = className;
}
//------------------------------------------------------------------------------------------------
protected VectorMapJoinDesc vectorDesc;
protected VectorMapJoinInfo vectorMapJoinInfo;
// Whether this operator is an outer join.
protected boolean isOuterJoin;
// Position of the *single* native vector map join small table.
protected byte posSingleVectorMapJoinSmallTable;
// The incoming vectorization context. It describes the input big table vectorized row batch.
protected VectorizationContext vContext;
// This is the vectorized row batch description of the output of the native vectorized map join
// operator. It is based on the incoming vectorization context. Its projection may include
// a mixture of input big table columns and new scratch columns.
protected VectorizationContext vOutContext;
protected VectorMapJoinVariation vectorMapJoinVariation;
protected HashTableKind hashTableKind;
protected HashTableKeyType hashTableKeyType;
// The output column projection of the vectorized row batch. And, the type infos of the output
// columns.
protected int[] outputProjection;
protected TypeInfo[] outputTypeInfos;
// These are the vectorized batch expressions for filtering, key expressions, and value
// expressions.
protected VectorExpression[] bigTableFilterExpressions;
protected VectorExpression[] bigTableKeyExpressions;
protected VectorExpression[] bigTableValueExpressions;
// This is map of which vectorized row batch columns are the big table key columns. Since
// we may have key expressions that produce new scratch columns, we need a mapping.
// And, we have their type infos.
protected int[] bigTableKeyColumnMap;
protected String[] bigTableKeyColumnNames;
protected TypeInfo[] bigTableKeyTypeInfos;
// Similarly, this is map of which vectorized row batch columns are the big table value columns.
// Since we may have value expressions that produce new scratch columns, we need a mapping.
// And, we have their type infos.
protected int[] bigTableValueColumnMap;
protected String[] bigTableValueColumnNames;
protected TypeInfo[] bigTableValueTypeInfos;
/*
* NOTE:
* The Big Table key columns are from the key expressions.
* The Big Table value columns are from the getExpr(posBigTable) expressions.
* Any calculations needed for those will be scratch columns.
*
* The Small Table key and value output columns are scratch columns.
*
* Big Table Retain Column Map / TypeInfos:
* Any Big Table Batch columns that will be in the output result.
* 0, 1, ore more Column Nums and TypeInfos
*
* Non Outer Small Table Key Mapping:
* For non-[FULL] OUTER MapJoin, when Big Table key columns are not retained for the output
* result but are needed for the Small Table output result, they are put in this mapping
* as they are required for copying rows to the overflow batch.
*
* Outer Small Table Key Mapping
* For [FULL] OUTER MapJoin, the mapping for any Small Table key columns needed for the
* output result from the Big Table key columns. The Big Table keys cannot be projected since
* on NOMATCH there must be a physical column present to hold the non-match NULL.
*
* Full Outer Small Table Key Mapping
* For FULL OUTER MapJoin, the mapping from any needed Small Table key columns to their area
* in the output result.
*
* For deserializing a FULL OUTER non-match Small Table key into the output result.
* Can be partial or empty if some or all Small Table key columns are not retained.
*
* Small Table Value Mapping
* The mapping from Small Table value columns to their area in the output result.
*
* For deserializing Small Table value into the output result.
*
* It is the Small Table value index to output column numbers and TypeInfos.
* That is, a mapping of the LazyBinary field order to output batch scratch columns for the
* small table portion.
* Or, to use the output column nums for OUTER Small Table value NULLs.
*
*/
protected int[] bigTableRetainColumnMap;
protected TypeInfo[] bigTableRetainTypeInfos;
protected int[] nonOuterSmallTableKeyColumnMap;
protected TypeInfo[] nonOuterSmallTableKeyTypeInfos;
protected VectorColumnOutputMapping outerSmallTableKeyMapping;
protected VectorColumnSourceMapping fullOuterSmallTableKeyMapping;
protected VectorColumnSourceMapping smallTableValueMapping;
// The MapJoin output result projection for both the Big Table input batch and the overflow batch.
protected VectorColumnSourceMapping projectionMapping;
// These are the output columns for the small table and the outer small table keys.
protected int[] outerSmallTableKeyColumnMap;
protected int[] smallTableValueColumnMap;
// These are the columns in the big and small table that are ByteColumnVector columns.
// We create data buffers for these columns so we can copy strings into those columns by value.
protected int[] bigTableByteColumnVectorColumns;
protected int[] nonOuterSmallTableKeyByteColumnVectorColumns;
protected int[] outerSmallTableKeyByteColumnVectorColumns;
protected int[] smallTableByteColumnVectorColumns;
// The above members are initialized by the constructor and must not be
// transient.
//---------------------------------------------------------------------------
// The threshold where we should use a repeating vectorized row batch optimization for
// generating join output results.
protected transient boolean useOverflowRepeatedThreshold;
protected transient int overflowRepeatedThreshold;
// A helper object that efficiently copies the big table columns that are for the big table
// portion of the join output.
protected transient VectorCopyRow bigTableRetainedVectorCopy;
// This helper object deserializes BinarySortable format small table keys into columns of a row
// in a vectorized row batch.
protected int[] allSmallTableKeyColumnNums;
protected boolean[] allSmallTableKeyColumnIncluded;
protected transient VectorDeserializeRow<BinarySortableDeserializeRead> smallTableKeyOuterVectorDeserializeRow;
protected transient VectorCopyRow nonOuterSmallTableKeyVectorCopy;
// UNDONE
// A helper object that efficiently copies the big table key columns (input or key expressions)
// that appear in the small table portion of the join output.
protected transient VectorCopyRow outerSmallTableKeyVectorCopy;
// This helper object deserializes LazyBinary format small table values into columns of a row
// in a vectorized row batch.
protected transient VectorDeserializeRow<LazyBinaryDeserializeRead> smallTableValueVectorDeserializeRow;
// This a 2nd batch with the same "column schema" as the big table batch that can be used to
// build join output results in. If we can create some join output results in the big table
// batch, we will for better efficiency (i.e. avoiding copying). Otherwise, we will use the
// overflow batch.
protected transient VectorizedRowBatch overflowBatch;
// A scratch batch that will be used to play back big table rows that were spilled
// to disk for the Hybrid Grace hash partitioning.
protected transient VectorizedRowBatch spillReplayBatch;
// Whether the native vectorized map join operator has performed its common setup.
protected transient boolean needCommonSetup;
// Whether the native vectorized map join operator has performed its first batch setup.
protected transient boolean needFirstBatchSetup;
// Whether the native vectorized map join operator has performed its
// native vector map join hash table setup.
protected transient boolean needHashTableSetup;
// The small table hash table for the native vectorized map join operator.
protected transient VectorMapJoinHashTable vectorMapJoinHashTable;
protected transient long batchCounter;
protected transient long rowCounter;
/** Kryo ctor. Required for deserialization; fields are populated afterwards by Kryo. */
protected VectorMapJoinCommonOperator() {
  super();
}
/** Minimal ctor used when the operator description is attached later. */
public VectorMapJoinCommonOperator(CompilationOpContext ctx) {
  super(ctx);
}
/**
 * Full ctor used during vectorized plan compilation. Pulls all precomputed
 * column mappings and expressions out of the plan-time {@code VectorMapJoinInfo}
 * and derives the remaining common state via {@link #determineCommonInfo}.
 *
 * @param ctx compilation context
 * @param conf must be a {@link MapJoinDesc}
 * @param vContext incoming (big table) vectorization context
 * @param vectorDesc must be a {@link VectorMapJoinDesc} carrying a non-null VectorMapJoinInfo
 * @throws HiveException if common-info derivation fails
 */
public VectorMapJoinCommonOperator(CompilationOpContext ctx, OperatorDesc conf,
    VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException {
  super(ctx);

  MapJoinDesc desc = (MapJoinDesc) conf;
  this.conf = desc;
  this.vectorDesc = (VectorMapJoinDesc) vectorDesc;
  vectorMapJoinInfo = this.vectorDesc.getVectorMapJoinInfo();
  Preconditions.checkState(vectorMapJoinInfo != null);
  this.vContext = vContext;

  /*
   * Create a new vectorization context to create a new projection, but keep
   * same output column manager must be inherited to track the scratch the columns.
   */
  vOutContext = new VectorizationContext(getName(), this.vContext);

  order = desc.getTagOrder();
  posBigTable = (byte) desc.getPosBigTable();
  // With exactly two inputs, the small table is whichever position is not the big table.
  posSingleVectorMapJoinSmallTable = (order[0] == posBigTable ? order[1] : order[0]);
  isOuterJoin = !desc.getNoOuterJoin();

  vectorMapJoinVariation = this.vectorDesc.getVectorMapJoinVariation();
  hashTableKind = this.vectorDesc.getHashTableKind();
  hashTableKeyType = this.vectorDesc.getHashTableKeyType();

  // Big table key columns/expressions (key expressions may target scratch columns).
  bigTableKeyColumnMap = vectorMapJoinInfo.getBigTableKeyColumnMap();
  bigTableKeyColumnNames = vectorMapJoinInfo.getBigTableKeyColumnNames();
  bigTableKeyTypeInfos = vectorMapJoinInfo.getBigTableKeyTypeInfos();
  bigTableKeyExpressions = vectorMapJoinInfo.getSlimmedBigTableKeyExpressions();

  // Big table value columns/expressions.
  bigTableValueColumnMap = vectorMapJoinInfo.getBigTableValueColumnMap();
  bigTableValueColumnNames = vectorMapJoinInfo.getBigTableValueColumnNames();
  bigTableValueTypeInfos = vectorMapJoinInfo.getBigTableValueTypeInfos();
  bigTableValueExpressions = vectorMapJoinInfo.getSlimmedBigTableValueExpressions();

  bigTableFilterExpressions = vectorMapJoinInfo.getBigTableFilterExpressions();

  // Output-side mappings: retained big table columns and small table key/value areas.
  bigTableRetainColumnMap = vectorMapJoinInfo.getBigTableRetainColumnMap();
  bigTableRetainTypeInfos = vectorMapJoinInfo.getBigTableRetainTypeInfos();

  nonOuterSmallTableKeyColumnMap = vectorMapJoinInfo.getNonOuterSmallTableKeyColumnMap();
  nonOuterSmallTableKeyTypeInfos = vectorMapJoinInfo.getNonOuterSmallTableKeyTypeInfos();

  outerSmallTableKeyMapping = vectorMapJoinInfo.getOuterSmallTableKeyMapping();

  fullOuterSmallTableKeyMapping = vectorMapJoinInfo.getFullOuterSmallTableKeyMapping();

  smallTableValueMapping = vectorMapJoinInfo.getSmallTableValueMapping();

  projectionMapping = vectorMapJoinInfo.getProjectionMapping();

  determineCommonInfo(isOuterJoin);
}
/**
 * Derives the remaining common state from the plan-time mappings: output
 * column arrays, the sets of BytesColumnVector columns that need data-buffer
 * handling, and the output projection. Also logs a full dump of the derived
 * configuration at INFO level, then wires up the output vectorization context.
 *
 * Fixes: three log labels previously did not match what was printed
 * ("bigTableValueTypeNames", "getBigTableRetainColumnMap", and a stray '+'
 * in the getValueIndices label).
 *
 * @param isOuter whether this is an outer join (currently unused here; kept
 *        for subclass symmetry)
 * @throws HiveException propagated from projection setup
 */
protected void determineCommonInfo(boolean isOuter) throws HiveException {

  outerSmallTableKeyColumnMap = outerSmallTableKeyMapping.getOutputColumns();

  smallTableValueColumnMap = smallTableValueMapping.getOutputColumns();

  // Which big table and small table columns are ByteColumnVector and need have their data buffer
  // to be manually reset for some join result processing?

  bigTableByteColumnVectorColumns =
      getByteColumnVectorColumns(bigTableRetainColumnMap, bigTableRetainTypeInfos);

  nonOuterSmallTableKeyByteColumnVectorColumns =
      getByteColumnVectorColumns(nonOuterSmallTableKeyColumnMap, nonOuterSmallTableKeyTypeInfos);

  outerSmallTableKeyByteColumnVectorColumns =
      getByteColumnVectorColumns(outerSmallTableKeyMapping);

  smallTableByteColumnVectorColumns =
      getByteColumnVectorColumns(smallTableValueMapping);

  outputProjection = projectionMapping.getOutputColumns();
  outputTypeInfos = projectionMapping.getTypeInfos();

  if (LOG.isInfoEnabled()) {
    int[] orderDisplayable = new int[order.length];
    for (int i = 0; i < order.length; i++) {
      orderDisplayable[i] = (int) order[i];
    }
    LOG.info(getLoggingPrefix() + " order " +
        Arrays.toString(orderDisplayable));
    LOG.info(getLoggingPrefix() + " posBigTable " +
        (int) posBigTable);
    LOG.info(getLoggingPrefix() + " posSingleVectorMapJoinSmallTable " +
        (int) posSingleVectorMapJoinSmallTable);

    LOG.info(getLoggingPrefix() + " bigTableKeyColumnMap " +
        Arrays.toString(bigTableKeyColumnMap));
    LOG.info(getLoggingPrefix() + " bigTableKeyColumnNames " +
        Arrays.toString(bigTableKeyColumnNames));
    LOG.info(getLoggingPrefix() + " bigTableKeyTypeInfos " +
        Arrays.toString(bigTableKeyTypeInfos));

    LOG.info(getLoggingPrefix() + " bigTableValueColumnMap " +
        Arrays.toString(bigTableValueColumnMap));
    LOG.info(getLoggingPrefix() + " bigTableValueColumnNames " +
        Arrays.toString(bigTableValueColumnNames));
    // Label fixed: this prints bigTableValueTypeInfos.
    LOG.info(getLoggingPrefix() + " bigTableValueTypeInfos " +
        Arrays.toString(bigTableValueTypeInfos));

    // Label fixed: this prints the bigTableRetainColumnMap field.
    LOG.info(getLoggingPrefix() + " bigTableRetainColumnMap " +
        Arrays.toString(bigTableRetainColumnMap));
    LOG.info(getLoggingPrefix() + " bigTableRetainTypeInfos " +
        Arrays.toString(bigTableRetainTypeInfos));

    LOG.info(getLoggingPrefix() + " nonOuterSmallTableKeyColumnMap " +
        Arrays.toString(nonOuterSmallTableKeyColumnMap));
    LOG.info(getLoggingPrefix() + " nonOuterSmallTableKeyTypeInfos " +
        Arrays.toString(nonOuterSmallTableKeyTypeInfos));

    LOG.info(getLoggingPrefix() + " outerSmallTableKeyMapping " +
        outerSmallTableKeyMapping.toString());

    LOG.info(getLoggingPrefix() + " fullOuterSmallTableKeyMapping " +
        fullOuterSmallTableKeyMapping.toString());

    LOG.info(getLoggingPrefix() + " smallTableValueMapping " +
        smallTableValueMapping.toString());

    LOG.info(getLoggingPrefix() + " bigTableByteColumnVectorColumns " +
        Arrays.toString(bigTableByteColumnVectorColumns));
    LOG.info(getLoggingPrefix() + " smallTableByteColumnVectorColumns " +
        Arrays.toString(smallTableByteColumnVectorColumns));

    LOG.info(getLoggingPrefix() + " outputProjection " +
        Arrays.toString(outputProjection));
    LOG.info(getLoggingPrefix() + " outputTypeInfos " +
        Arrays.toString(outputTypeInfos));

    LOG.info(getLoggingPrefix() + " mapJoinDesc.getKeysString " +
        conf.getKeysString());
    if (conf.getValueIndices() != null) {
      for (Entry<Byte, int[]> entry : conf.getValueIndices().entrySet()) {
        // Label fixed: removed stray '+' at the end of the literal.
        LOG.info(getLoggingPrefix() + " mapJoinDesc.getValueIndices "
            + (int) entry.getKey() + " " + Arrays.toString(entry.getValue()));
      }
    }
    LOG.info(getLoggingPrefix() + " mapJoinDesc.getExprs " +
        conf.getExprs().toString());
    LOG.info(getLoggingPrefix() + " mapJoinDesc.getRetainList " +
        conf.getRetainList().toString());

  }

  setupVOutContext(conf.getOutputColumnNames());
}
/**
 * Determine from a mapping which columns are BytesColumnVector columns.
 *
 * @param mapping column mapping supplying output columns and their type infos
 * @return output column numbers whose type is in the string family
 */
private int[] getByteColumnVectorColumns(VectorColumnMapping mapping) {
  return getByteColumnVectorColumns(mapping.getOutputColumns(), mapping.getTypeInfos());
}
/**
 * Selects the output columns whose type belongs to the string family; those
 * columns are backed by BytesColumnVector and need data-buffer handling.
 *
 * @param outputColumns output column numbers, parallel to {@code typeInfos}
 * @param typeInfos type info per output column
 * @return the subset of {@code outputColumns} with string-family types, in order
 */
private int[] getByteColumnVectorColumns(int[] outputColumns, TypeInfo[] typeInfos) {
  ArrayList<Integer> stringColumns = new ArrayList<Integer>();
  for (int idx = 0; idx < outputColumns.length; idx++) {
    if (VectorizationContext.isStringFamily(typeInfos[idx].getTypeName())) {
      stringColumns.add(outputColumns[idx]);
    }
  }
  return ArrayUtils.toPrimitive(stringColumns.toArray(new Integer[0]));
}
/**
 * Registers the operator's output projection with the output vectorization
 * context, pairing each output column name with its projected batch column
 * (a mix of input big table columns and scratch columns).
 *
 * @param outputColumnNames names of the output columns, parallel to outputProjection
 * @throws RuntimeException if the name list and projection lengths disagree
 */
protected void setupVOutContext(List<String> outputColumnNames) {
  if (LOG.isDebugEnabled()) {
    LOG.debug(getLoggingPrefix() + " outputColumnNames " + outputColumnNames);
  }
  final int nameCount = outputColumnNames.size();
  if (nameCount != outputProjection.length) {
    throw new RuntimeException("Output column names " + outputColumnNames +
        " length and output projection " + Arrays.toString(outputProjection) +
        " / " + Arrays.toString(outputTypeInfos) + " length mismatch");
  }
  vOutContext.resetProjectionColumns();
  for (int colIdx = 0; colIdx < nameCount; ++colIdx) {
    final String name = outputColumnNames.get(colIdx);
    final int projectedCol = outputProjection[colIdx];
    vOutContext.addProjectionColumn(name, projectedCol);
    if (LOG.isDebugEnabled()) {
      LOG.debug(getLoggingPrefix() + " addProjectionColumn " + colIdx + " columnName " + name +
          " outputColumn " + projectedCol);
    }
  }
}
/**
 * This override lets us substitute our own fast vectorized hash table loader.
 *
 * Fix: the implementation type was fetched into a local but the switch
 * re-invoked the getter; the local is now used consistently and each arm
 * returns directly.
 *
 * @param hconf job configuration passed to the Tez loader factory
 * @return the loader matching the planned hash table implementation
 * @throws RuntimeException on an unrecognized implementation type
 */
@Override
protected HashTableLoader getHashTableLoader(Configuration hconf) {
  HashTableImplementationType hashTableImplementationType = vectorDesc.getHashTableImplementationType();
  switch (hashTableImplementationType) {
  case OPTIMIZED:
    // Use the Tez hash table loader.
    return HashTableLoaderFactory.getLoader(hconf);
  case FAST:
    // Use our specialized hash table loader.
    return new VectorMapJoinFastHashTableLoader();
  default:
    throw new RuntimeException("Unknown vector map join hash table implementation type " + hashTableImplementationType.name());
  }
}
/*
 * Do FULL OUTER MapJoin operator initialization.
 *
 * Builds the Small Table key output mapping: for each key position, records
 * which output column (if any) receives it, and prepares a BinarySortable
 * deserializer for multi-key non-match rows.
 */
private void initializeFullOuterObjects() throws HiveException {

  // The Small Table key type info is the same as Big Table's.
  TypeInfo[] smallTableKeyTypeInfos = bigTableKeyTypeInfos;
  final int allKeysSize = smallTableKeyTypeInfos.length;

  /*
   * The VectorMapJoinFullOuter{Long|MultiKey|String}Operator outputs 0, 1, or more
   * Small Key columns in the join result.
   */
  // -1 marks a key position that is not retained in the output.
  allSmallTableKeyColumnNums = new int[allKeysSize];
  Arrays.fill(allSmallTableKeyColumnNums, -1);
  allSmallTableKeyColumnIncluded = new boolean[allKeysSize];

  final int outputKeysSize = fullOuterSmallTableKeyMapping.getCount();
  int[] outputKeyNums = fullOuterSmallTableKeyMapping.getInputColumns();
  int[] outputKeyOutputColumns = fullOuterSmallTableKeyMapping.getOutputColumns();
  for (int i = 0; i < outputKeysSize; i++) {
    final int outputKeyNum = outputKeyNums[i];
    allSmallTableKeyColumnNums[outputKeyNum] = outputKeyOutputColumns[i];
    allSmallTableKeyColumnIncluded[outputKeyNum] = true;
  }

  // Multi-key joins serialize keys in BinarySortable format; set up a row
  // deserializer only when some key columns actually appear in the output.
  if (hashTableKeyType == HashTableKeyType.MULTI_KEY &&
      outputKeysSize > 0) {
    smallTableKeyOuterVectorDeserializeRow =
        new VectorDeserializeRow<BinarySortableDeserializeRead>(BinarySortableDeserializeRead.with(
            smallTableKeyTypeInfos, true, getConf().getKeyTblDesc().getProperties()));
    smallTableKeyOuterVectorDeserializeRow.init(
        allSmallTableKeyColumnNums, allSmallTableKeyColumnIncluded);
  }
}
/** {@inheritDoc} One time operator initialization: transient-init expressions, read config,
 * create copy/deserialize helpers, and build the overflow batch. */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);

  VectorExpression.doTransientInit(bigTableFilterExpressions, hconf);
  VectorExpression.doTransientInit(bigTableKeyExpressions, hconf);
  // Fixed: bigTableValueExpressions was transient-initialized twice in a row (copy-paste
  // duplication); a single call suffices.
  VectorExpression.doTransientInit(bigTableValueExpressions, hconf);

  /*
   * Get configuration parameters.
   */
  overflowRepeatedThreshold = HiveConf.getIntVar(hconf,
      HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_OVERFLOW_REPEATED_THRESHOLD);
  // A negative threshold disables the repeated-overflow optimization.
  useOverflowRepeatedThreshold = (overflowRepeatedThreshold >= 0);

  /*
   * Create our vectorized copy row and deserialize row helper objects.
   */
  if (vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) {
    initializeFullOuterObjects();
  }

  if (smallTableValueMapping.getCount() > 0) {
    smallTableValueVectorDeserializeRow =
        new VectorDeserializeRow<LazyBinaryDeserializeRead>(
            new LazyBinaryDeserializeRead(
                smallTableValueMapping.getTypeInfos(),
                /* useExternalBuffer */ true));
    smallTableValueVectorDeserializeRow.init(smallTableValueMapping.getOutputColumns());
  }

  if (bigTableRetainColumnMap.length > 0) {
    bigTableRetainedVectorCopy = new VectorCopyRow();
    bigTableRetainedVectorCopy.init(
        bigTableRetainColumnMap, bigTableRetainTypeInfos);
  }

  if (nonOuterSmallTableKeyColumnMap.length > 0) {
    nonOuterSmallTableKeyVectorCopy = new VectorCopyRow();
    nonOuterSmallTableKeyVectorCopy.init(
        nonOuterSmallTableKeyColumnMap, nonOuterSmallTableKeyTypeInfos);
  }

  if (outerSmallTableKeyMapping.getCount() > 0) {
    outerSmallTableKeyVectorCopy = new VectorCopyRow();
    outerSmallTableKeyVectorCopy.init(outerSmallTableKeyMapping);
  }

  /*
   * Setup the overflow batch.
   */
  overflowBatch = setupOverflowBatch();

  // Remaining setup is performed lazily from process() -- see commonSetup(),
  // firstBatchSetup() and hashTableSetup().
  needCommonSetup = true;
  needFirstBatchSetup = true;
  needHashTableSetup = true;

  if (LOG.isDebugEnabled()) {
    int[] currentScratchColumns = vOutContext.currentScratchColumns();
    LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator initializeOp currentScratchColumns " + Arrays.toString(currentScratchColumns));

    StructObjectInspector structOutputObjectInspector = (StructObjectInspector) outputObjInspector;
    List<? extends StructField> fields = structOutputObjectInspector.getAllStructFieldRefs();
    int i = 0;
    for (StructField field : fields) {
      LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator initializeOp " + i + " field " + field.getFieldName() + " type " + field.getFieldObjectInspector().getTypeName());
      i++;
    }
  }
}
/** {@inheritDoc} Called once mapJoinTables and serdes are set up by the parent; wires the
 * vectorized hash table unless hash table loading is disabled for testing. */
@Override
protected void completeInitializationOp(Object[] os) throws HiveException {
  // setup mapJoinTables and serdes
  super.completeInitializationOp(os);

  if (isTestingNoHashTableLoad) {
    return;
  }

  // Fixed: removed an unused local that merely read
  // mapJoinTables[posSingleVectorMapJoinSmallTable]; setUpHashTable() fetches it itself.
  setUpHashTable();
}
/*
 * Test hook: install a pre-built map join table container and wire the hash table.
 *
 * NOTE(review): the posSmallTable argument is ignored; the container is always installed at
 * posSingleVectorMapJoinSmallTable. Presumably intentional for the single-small-table vector
 * map join case -- confirm against the overridden interface contract.
 */
@VisibleForTesting
@Override
public void setTestMapJoinTableContainer(int posSmallTable,
MapJoinTableContainer testMapJoinTableContainer,
MapJoinTableContainerSerDe mapJoinTableContainerSerDe) {
mapJoinTables[posSingleVectorMapJoinSmallTable] = testMapJoinTableContainer;
setUpHashTable();
}
/*
 * Point vectorMapJoinHashTable at the hash table variation matching the configured
 * implementation type, wrapping or unwrapping the small table's container as needed.
 */
private void setUpHashTable() {
  HashTableImplementationType hashTableImplementationType = vectorDesc.getHashTableImplementationType();
  // Switch on the local copy instead of re-invoking the getter a second time.
  switch (hashTableImplementationType) {
  case OPTIMIZED:
    {
      // Create our vector map join optimized hash table variation *above* the
      // map join table container.
      vectorMapJoinHashTable = VectorMapJoinOptimizedCreateHashTable.createHashTable(conf,
          mapJoinTables[posSingleVectorMapJoinSmallTable]);
    }
    break;

  case FAST:
    {
      // Get our vector map join fast hash table variation from the
      // vector map join table container.
      VectorMapJoinTableContainer vectorMapJoinTableContainer =
          (VectorMapJoinTableContainer) mapJoinTables[posSingleVectorMapJoinSmallTable];
      vectorMapJoinHashTable = vectorMapJoinTableContainer.vectorMapJoinHashTable();
    }
    break;

  default:
    throw new RuntimeException("Unknown vector map join hash table implementation type " + hashTableImplementationType.name());
  }
  LOG.info("Using " + vectorMapJoinHashTable.getClass().getSimpleName() + " from " + this.getClass().getSimpleName());
}
/*
 * Build a second batch with the same "column schema" as the big table batch; join output
 * results are assembled into it when the main output batch fills up.
 */
protected VectorizedRowBatch setupOverflowBatch() throws HiveException {
  final int firstScratchColumn = vContext.firstOutputColumnIndex();
  final String[] scratchTypeNames = vOutContext.getScratchColumnTypeNames();

  final VectorizedRowBatch result =
      new VectorizedRowBatch(firstScratchColumn + scratchTypeNames.length);

  // Allocate only the projection columns we will actually use.
  for (int i = 0; i < outputProjection.length; i++) {
    final int projectedColumn = outputProjection[i];
    allocateOverflowBatchColumnVector(
        result,
        projectedColumn,
        outputTypeInfos[i].getTypeName(),
        vOutContext.getDataTypePhysicalVariation(projectedColumn));
  }

  // Then add any scratch columns needed by children operators.
  int scratchColumn = firstScratchColumn;
  for (String scratchTypeName : scratchTypeNames) {
    allocateOverflowBatchColumnVector(
        result,
        scratchColumn,
        scratchTypeName,
        vOutContext.getDataTypePhysicalVariation(scratchColumn));
    scratchColumn++;
  }

  result.projectedColumns = outputProjection;
  result.projectionSize = outputProjection.length;

  result.reset();
  return result;
}
/*
 * Lazily allocate one overflow batch column vector by hand; a no-op if the column vector
 * already exists.
 */
private void allocateOverflowBatchColumnVector(VectorizedRowBatch overflowBatch, int outputColumn,
    String typeName,
    DataTypePhysicalVariation dataTypePhysicalVariation) {
  if (overflowBatch.cols[outputColumn] != null) {
    // Already allocated by an earlier call.
    return;
  }

  // Canonicalize the type name before parsing it into type info.
  final String canonicalTypeName = VectorizationContext.mapTypeNameSynonyms(typeName);
  final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(canonicalTypeName);

  overflowBatch.cols[outputColumn] = VectorizedBatchUtil.createColumnVector(typeInfo, dataTypePhysicalVariation);

  if (LOG.isDebugEnabled()) {
    LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator initializeOp overflowBatch outputColumn " + outputColumn + " class " + overflowBatch.cols[outputColumn].getClass().getSimpleName());
  }
}
/*
 * Common one time setup for Native Vector MapJoin operator.
 */
protected void commonSetup() throws HiveException {
  /*
   * Make sure the overflow batch BytesColumnVectors have room for string values from the
   * big table columns and the small table key/value columns. The four column lists were
   * previously initialized by four identical copy-pasted loops; use a shared helper.
   */
  initOverflowBatchStringBuffers(bigTableByteColumnVectorColumns);
  initOverflowBatchStringBuffers(nonOuterSmallTableKeyByteColumnVectorColumns);
  initOverflowBatchStringBuffers(outerSmallTableKeyByteColumnVectorColumns);
  initOverflowBatchStringBuffers(smallTableByteColumnVectorColumns);

  // Reset per-operator counters.
  batchCounter = 0;
  rowCounter = 0;
}

/*
 * Initialize the string buffers of the given overflow batch byte columns.
 */
private void initOverflowBatchStringBuffers(int[] columns) {
  for (int column : columns) {
    ((BytesColumnVector) overflowBatch.cols[column]).initBuffer();
  }
}
/*
 * One time setup driven by the first batch seen by this native vectorized map join operator.
 */
public void firstBatchSetup(VectorizedRowBatch batch) throws HiveException {
  // Small table BytesColumnVectors in the big table batch (and overflow batch) must have
  // their string buffers ready.
  for (int byteColumn : smallTableByteColumnVectorColumns) {
    ((BytesColumnVector) batch.cols[byteColumn]).initBuffer();
  }

  // Scratch batch used to play back big table rows spilled to disk by the Hybrid Grace
  // hash partitioning.
  spillReplayBatch = VectorizedBatchUtil.makeLike(batch);
}
/*
 * Perform any Native Vector MapJoin operator specific hash table setup.
 */
public void hashTableSetup() throws HiveException {
// Intentionally empty: subclasses override this hook to do specialization-specific setup.
// Invoked from process() on the first batch and again after a Hybrid Grace reload.
}
/*
 * Perform the Native Vector MapJoin operator work.
 *
 * @param batch the big table batch to join; never empty (process() filters size == 0).
 */
public abstract void processBatch(VectorizedRowBatch batch) throws HiveException;
/*
 * Common process method for all Native Vector MapJoin operators.
 *
 * Runs the one-time common, first-batch, and hash-table setup hooks when their flags are
 * set, then hands non-empty batches to the processBatch override.
 */
@Override
public void process(Object row, int tag) throws HiveException {
  VectorizedRowBatch inputBatch = (VectorizedRowBatch) row;

  alias = (byte) tag;

  if (needCommonSetup) {
    // One time process method initialization.
    commonSetup();
    needCommonSetup = false;
  }

  if (needFirstBatchSetup) {
    // One time initialization driven by the first batch.
    firstBatchSetup(inputBatch);
    needFirstBatchSetup = false;
  }

  if (needHashTableSetup) {
    // Hash table specialization setup: first process() call, or again after a
    // Hybrid Grace reload.
    hashTableSetup();
    needHashTableSetup = false;
  }

  batchCounter++;

  final int batchSize = inputBatch.size;
  if (batchSize == 0) {
    return;
  }
  rowCounter += batchSize;

  processBatch(inputBatch);
}
/*
 * Debug-log the column count and the column vector class of every column in the batch.
 */
protected void displayBatchColumns(VectorizedRowBatch batch, String batchName) {
  LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator commonSetup " + batchName + " column count " + batch.numCols);
  for (int column = 0; column < batch.numCols; column++) {
    final Object colVector = batch.cols[column];
    final String colDescription = (colVector == null ? "NULL" : colVector.getClass().getSimpleName());
    LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator commonSetup " + batchName + " column " + column + " type " + colDescription);
  }
}
/** {@inheritDoc} Reports this operator as a MAPJOIN node in the query plan. */
@Override
public OperatorType getType() {
return OperatorType.MAPJOIN;
}
/** {@inheritDoc} Returns the vectorization context describing the operator's input batch. */
@Override
public VectorizationContext getInputVectorizationContext() {
return vContext;
}
/** {@inheritDoc} Returns the vector map join descriptor for this operator. */
@Override
public VectorDesc getVectorDesc() {
return vectorDesc;
}
/** {@inheritDoc} Returns the vectorization context describing the operator's output batch. */
@Override
public VectorizationContext getOutputVectorizationContext() {
return vOutContext;
}
}
|
apache/ignite | 35,166 | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/CacheBaselineTopologyTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ignite.internal.processors.cache.distributed;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.affinity.AffinityFunction;
import org.apache.ignite.cache.affinity.AffinityFunctionContext;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.BaselineNode;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.WALMode;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.cluster.DetachedClusterNode;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionFullMap;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionTopology;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.cache.PartitionLossPolicy.READ_ONLY_SAFE;
/**
*
*/
public class CacheBaselineTopologyTest extends GridCommonAbstractTest {
/** */
private static final String CACHE_NAME = "cache";
/** */
private static final int NODE_COUNT = 4;
/** */
private boolean disableAutoActivation;
/** */
private Map<String, Object> userAttrs;
/** */
private static final String DATA_NODE = "dataNodeUserAttr";
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
super.beforeTest();
// Start each test from a clean persistent store so no stale baseline survives.
cleanPersistenceDir();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
super.afterTest();
// Tear down the whole cluster and wipe persistence so tests stay independent.
stopAllGrids();
cleanPersistenceDir();
// Reset the per-test flag set by testAffinityAssignmentChangedAfterRestart.
disableAutoActivation = false;
}
/** {@inheritDoc} Configures a persistent default data region plus an in-memory "memory"
 * region, a stable consistent id, and optional user attributes. */
@Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
    IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

    // Consistent id must be stable across restarts for baseline membership.
    cfg.setConsistentId(igniteInstanceName);

    if (disableAutoActivation)
        cfg.setAutoActivationEnabled(false);

    // Default region: persistent. Extra "memory" region: purely in-memory caches.
    DataRegionConfiguration persistentRegion = new DataRegionConfiguration()
        .setPersistenceEnabled(true)
        .setMaxSize(100L * 1024 * 1024)
        .setInitialSize(100L * 1024 * 1024);

    DataRegionConfiguration inMemoryRegion = new DataRegionConfiguration()
        .setName("memory")
        .setPersistenceEnabled(false)
        .setMaxSize(100L * 1024 * 1024)
        .setInitialSize(100L * 1024 * 1024);

    cfg.setDataStorageConfiguration(
        new DataStorageConfiguration()
            .setDefaultDataRegionConfiguration(persistentRegion)
            .setDataRegionConfigurations(inMemoryRegion)
            .setWalMode(WALMode.LOG_ONLY)
    );

    if (userAttrs != null)
        cfg.setUserAttributes(userAttrs);

    return cfg;
}
/**
 * Verifies that rebalance on a cache with a Node Filter happens when BaselineTopology changes.
 *
 * @throws Exception If failed.
 */
@Test
public void testRebalanceForCacheWithNodeFilter() throws Exception {
try {
final int EMPTY_NODE_IDX = 2;
// Nodes 0 and 1 carry DATA_NODE=true and pass the cache node filter.
userAttrs = U.newHashMap(1);
userAttrs.put(DATA_NODE, true);
startGrids(2);
// Node 2 is filtered out of the cache and must stay empty.
userAttrs.put(DATA_NODE, false);
IgniteEx ignite = startGrid(2);
ignite.cluster().baselineAutoAdjustEnabled(false);
ignite.cluster().state(ClusterState.ACTIVE);
awaitPartitionMapExchange();
IgniteCache<Integer, Integer> cache =
ignite.createCache(
new CacheConfiguration<Integer, Integer>()
.setName(CACHE_NAME)
.setCacheMode(PARTITIONED)
.setBackups(1)
.setPartitionLossPolicy(READ_ONLY_SAFE)
.setAffinity(new RendezvousAffinityFunction(32, null))
.setNodeFilter(new DataNodeFilter())
);
for (int k = 0; k < 10_000; k++)
cache.put(k, k);
// NOTE(review): fixed sleep gives rebalance time to settle -- potentially flaky.
Thread.sleep(500);
printSizesDataNodes(NODE_COUNT - 1, EMPTY_NODE_IDX);
// A new data node joins and the baseline is bumped: rebalance must move data onto it.
userAttrs.put(DATA_NODE, true);
startGrid(3);
ignite.cluster().setBaselineTopology(ignite.cluster().topologyVersion());
awaitPartitionMapExchange();
Thread.sleep(500);
printSizesDataNodes(NODE_COUNT, EMPTY_NODE_IDX);
}
finally {
// Do not leak attributes into other tests.
userAttrs = null;
}
}
/**
 * Asserts that every data node holds primary entries while the filtered-out node holds none.
 *
 * @param nodesCnt Number of started nodes to check.
 * @param emptyNodeIdx Index of the node expected to hold no cache data.
 */
private void printSizesDataNodes(int nodesCnt, int emptyNodeIdx) {
    for (int nodeIdx = 0; nodeIdx < nodesCnt; nodeIdx++) {
        int locSize = grid(nodeIdx).cache(CACHE_NAME).localSize(CachePeekMode.PRIMARY);

        if (nodeIdx == emptyNodeIdx) {
            assertEquals("Cache local size on " + nodeIdx + " node is expected to be zero", 0, locSize);
        }
        else {
            assertTrue("Cache local size on " + nodeIdx + " node is expected to be non zero", locSize > 0);
        }
    }
}
/** Node filter accepting only nodes marked with the {@code DATA_NODE} user attribute. */
private static class DataNodeFilter implements IgnitePredicate<ClusterNode> {
    /** {@inheritDoc} */
    @Override public boolean apply(ClusterNode clusterNode) {
        // Null-safe comparison: attribute() returns null (not FALSE) when the attribute is
        // absent, and the previous direct unboxing would throw a NullPointerException for
        // nodes started without user attributes.
        return Boolean.TRUE.equals(clusterNode.attribute(DATA_NODE));
    }
}
/**
 * Checks affinity behavior when topology changes while the baseline stays fixed: a joining
 * non-baseline node must not affect affinity, stopping both owners loses the partition, and
 * restarting them restores the original mapping (by consistent id).
 *
 * @throws Exception If failed.
 */
@Test
public void testTopologyChangesWithFixedBaseline() throws Exception {
startGrids(NODE_COUNT);
IgniteEx ignite = grid(0);
ignite.cluster().baselineAutoAdjustEnabled(false);
ignite.cluster().state(ClusterState.ACTIVE);
awaitPartitionMapExchange();
Map<ClusterNode, Ignite> nodes = new HashMap<>();
for (int i = 0; i < NODE_COUNT; i++) {
Ignite ig = grid(i);
nodes.put(ig.cluster().localNode(), ig);
}
IgniteCache<Integer, Integer> cache =
ignite.createCache(
new CacheConfiguration<Integer, Integer>()
.setName(CACHE_NAME)
.setCacheMode(PARTITIONED)
.setBackups(1)
.setPartitionLossPolicy(READ_ONLY_SAFE)
);
// Pick a key that is neither primary nor backup on the coordinator node.
int key = -1;
for (int k = 0; k < 100_000; k++) {
if (!ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(k).contains(ignite.localNode())) {
key = k;
break;
}
}
assert key >= 0;
int part = ignite.affinity(CACHE_NAME).partition(key);
Collection<ClusterNode> initMapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping.size() == 2 : initMapping;
// Fix the baseline to the current four nodes.
ignite.cluster().setBaselineTopology(baselineNodes(nodes.keySet()));
awaitPartitionMapExchange();
cache.put(key, 1);
Collection<ClusterNode> mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping.size() == mapping.size() : mapping;
assert initMapping.containsAll(mapping) : mapping;
// A node outside the baseline joins: the mapping must stay unchanged on all nodes.
IgniteEx newIgnite = startGrid(4);
awaitPartitionMapExchange();
mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping.size() == mapping.size() : mapping;
assert initMapping.containsAll(mapping) : mapping;
mapping = newIgnite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping.size() == mapping.size() : mapping;
assert initMapping.containsAll(mapping) : mapping;
Set<String> stoppedNodeNames = new HashSet<>();
// Stop the first owner: only one copy of the partition remains.
ClusterNode node = mapping.iterator().next();
stoppedNodeNames.add(nodes.get(node).name());
nodes.get(node).close();
nodes.remove(node);
awaitPartitionMapExchange(true, true, null);
mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert mapping.size() == 1 : mapping;
assert initMapping.containsAll(mapping);
// Stop the second owner: the partition becomes lost.
node = mapping.iterator().next();
stoppedNodeNames.add(nodes.get(node).name());
nodes.get(node).close();
nodes.remove(node);
awaitPartitionMapExchange();
mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert mapping.isEmpty() : mapping;
GridDhtPartitionTopology top = ignite.cachex(CACHE_NAME).context().topology();
assert top.lostPartitions().contains(part);
// Restart the stopped owners: the mapping must match the initial one by consistent id.
for (String nodeName : stoppedNodeNames) {
startGrid(nodeName);
}
assert ignite.cluster().nodes().size() == NODE_COUNT + 1;
mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping.size() == mapping.size() : mapping;
for (ClusterNode n1 : initMapping) {
boolean found = false;
for (ClusterNode n2 : mapping) {
if (n2.consistentId().equals(n1.consistentId())) {
found = true;
break;
}
}
assert found;
}
// After resetting lost state the cache must be writable again.
ignite.resetLostPartitions(Collections.singleton(CACHE_NAME));
cache.put(key, 2);
}
/**
 * Runs the baseline-change scenario driving all changes from a server node.
 *
 * @throws Exception If failed.
 */
@Test
public void testBaselineTopologyChangesFromServer() throws Exception {
testBaselineTopologyChanges(false);
}
/**
 * Runs the baseline-change scenario driving all changes from a client node.
 *
 * @throws Exception If failed.
 */
@Test
public void testBaselineTopologyChangesFromClient() throws Exception {
testBaselineTopologyChanges(true);
}
/**
 * Checks that the cluster continuously reports the active state while a baseline change is
 * in progress on another thread.
 *
 * @throws Exception if failed.
 */
@Test
public void testClusterActiveWhileBaselineChanging() throws Exception {
startGrids(NODE_COUNT);
IgniteEx ig = grid(0);
ig.cluster().baselineAutoAdjustEnabled(false);
ig.cluster().state(ClusterState.ACTIVE);
assertTrue(ig.cluster().state().active());
startGrid(NODE_COUNT);
// Kick off the baseline change asynchronously after a short delay.
IgniteInternalFuture fut = GridTestUtils.runAsync(() -> {
try {
U.sleep(100);
}
catch (IgniteInterruptedCheckedException e) {
e.printStackTrace();
}
ig.cluster().setBaselineTopology(NODE_COUNT + 1);
});
// Busy-check: no node may report inactive while the baseline change runs.
while (!fut.isDone()) {
assertTrue(grid(0).cluster().state().active());
assertTrue(grid(0).context().state().publicApiActiveState(false));
assertTrue(grid(NODE_COUNT).cluster().state().active());
assertTrue(grid(NODE_COUNT).context().state().publicApiActiveState(false));
}
assertNull(String.valueOf(fut.error()), fut.error());
assertEquals(NODE_COUNT + 1, ig.cluster().currentBaselineTopology().size());
}
/**
 * Checks affinity while the baseline is changed step by step: owner stop, baseline shrink,
 * non-baseline node join (no primaries), baseline grow (gets primaries), baseline reset.
 *
 * @param fromClient {@code true} to drive baseline changes from a client node.
 * @throws Exception If failed.
 */
private void testBaselineTopologyChanges(boolean fromClient) throws Exception {
startGrids(NODE_COUNT);
IgniteEx ignite;
if (fromClient)
ignite = startClientGrid(NODE_COUNT + 10);
else
ignite = grid(0);
ignite.cluster().baselineAutoAdjustEnabled(false);
ignite.cluster().state(ClusterState.ACTIVE);
awaitPartitionMapExchange();
Map<ClusterNode, Ignite> nodes = new HashMap<>();
for (int i = 0; i < NODE_COUNT; i++) {
Ignite ig = grid(i);
nodes.put(ig.cluster().localNode(), ig);
}
ignite.createCache(
new CacheConfiguration<Integer, Integer>()
.setName(CACHE_NAME)
.setCacheMode(PARTITIONED)
.setBackups(1)
.setPartitionLossPolicy(READ_ONLY_SAFE)
);
manualCacheRebalancing(ignite, CACHE_NAME);
// Pick a key not owned by the node driving the test.
int key = -1;
for (int k = 0; k < 100_000; k++) {
if (!ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(k).contains(ignite.localNode())) {
key = k;
break;
}
}
assert key >= 0;
Collection<ClusterNode> initMapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping.size() == 2 : initMapping;
ignite.cluster().setBaselineTopology(baselineNodes(nodes.keySet()));
Set<String> stoppedNodeNames = new HashSet<>();
// Stop one owner: a single copy remains while the baseline is unchanged.
ClusterNode node = initMapping.iterator().next();
stoppedNodeNames.add(nodes.get(node).name());
nodes.get(node).close();
nodes.remove(node);
awaitPartitionMapExchange();
Collection<ClusterNode> mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert mapping.size() == 1 : mapping;
assert initMapping.containsAll(mapping);
// Shrink the baseline to surviving server nodes: a backup copy must be restored.
Set<ClusterNode> blt2 = new HashSet<>(ignite.cluster().nodes());
ignite.cluster().setBaselineTopology(baselineNodes(
blt2.stream().filter(n -> !n.isClient()).collect(Collectors.toSet())));
awaitPartitionMapExchange();
Collection<ClusterNode> initMapping2 = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping2.size() == 2 : initMapping2;
// A joining node outside the baseline must get no primary partitions.
Ignite newIgnite = startGrid(NODE_COUNT);
awaitPartitionMapExchange();
mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert mapping.size() == initMapping2.size() : mapping;
assert mapping.containsAll(initMapping2);
assert ignite.affinity(CACHE_NAME).primaryPartitions(newIgnite.cluster().localNode()).length == 0;
// Grow the baseline to include the new node: it must now own primaries.
Set<ClusterNode> blt3 = new HashSet<>(ignite.cluster().nodes());
ignite.cluster().setBaselineTopology(baselineNodes(
blt3.stream().filter(n -> !n.isClient()).collect(Collectors.toSet())));
awaitPartitionMapExchange();
Collection<ClusterNode> initMapping3 = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert initMapping3.size() == 2;
assert ignite.affinity(CACHE_NAME).primaryPartitions(newIgnite.cluster().localNode()).length > 0;
// One more node outside the baseline: again no primaries.
newIgnite = startGrid(NODE_COUNT + 1);
awaitPartitionMapExchange();
mapping = ignite.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert mapping.size() == initMapping3.size() : mapping;
assert mapping.containsAll(initMapping3);
assert ignite.affinity(CACHE_NAME).primaryPartitions(newIgnite.cluster().localNode()).length == 0;
// Resetting the baseline to the full topology brings the last node into affinity.
ignite.cluster().setBaselineTopology(null);
awaitPartitionMapExchange();
assert ignite.affinity(CACHE_NAME).primaryPartitions(newIgnite.cluster().localNode()).length > 0;
}
/**
 * Checks that after the primary node for a key leaves, the backup serves and accepts updates,
 * and the restarted primary receives the fresh value after manual rebalance.
 *
 * @throws Exception If failed.
 */
@Test
public void testPrimaryLeft() throws Exception {
startGrids(NODE_COUNT);
IgniteEx ig = grid(0);
ig.cluster().state(ClusterState.ACTIVE);
awaitPartitionMapExchange();
IgniteCache<Integer, Integer> cache =
ig.createCache(
new CacheConfiguration<Integer, Integer>()
.setName(CACHE_NAME)
.setCacheMode(PARTITIONED)
.setBackups(1)
.setPartitionLossPolicy(READ_ONLY_SAFE)
.setReadFromBackup(true)
.setWriteSynchronizationMode(FULL_SYNC)
.setRebalanceDelay(-1)
);
int key = 1;
List<ClusterNode> affNodes = (List<ClusterNode>)ig.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert affNodes.size() == 2;
int primaryIdx = -1;
IgniteEx primary = null;
IgniteEx backup = null;
manualCacheRebalancing(ig, CACHE_NAME);
// Locate the grids backing the primary and backup copies of the key.
for (int i = 0; i < NODE_COUNT; i++) {
if (grid(i).localNode().equals(affNodes.get(0))) {
primaryIdx = i;
primary = grid(i);
}
else if (grid(i).localNode().equals(affNodes.get(1)))
backup = grid(i);
}
assert primary != null;
assert backup != null;
Integer val1 = 1;
Integer val2 = 2;
cache.put(key, val1);
assertEquals(val1, primary.cache(CACHE_NAME).get(key));
assertEquals(val1, backup.cache(CACHE_NAME).get(key));
// Drive the rest of the test from a node that will survive.
if (ig == primary) {
ig = backup;
cache = ig.cache(CACHE_NAME);
}
primary.close();
// Wait until affinity for topology version (5, 0) -- after the primary left -- is ready.
backup.context().cache().context().exchange().affinityReadyFuture(new AffinityTopologyVersion(5, 0)).get();
assertEquals(backup.localNode(), ig.affinity(CACHE_NAME).mapKeyToNode(key));
cache.put(key, val2);
assertEquals(val2, backup.cache(CACHE_NAME).get(key));
primary = startGrid(primaryIdx);
// Rebalance is delayed (-1), so the backup stays the owner until manual rebalancing.
assertEquals(backup.localNode(), ig.affinity(CACHE_NAME).mapKeyToNode(key));
manualCacheRebalancing(ig, CACHE_NAME);
awaitPartitionMapExchange();
assertEquals(primary.localNode(), ig.affinity(CACHE_NAME).mapKeyToNode(key));
assertEquals(val2, primary.cache(CACHE_NAME).get(key));
assertEquals(val2, backup.cache(CACHE_NAME).get(key));
}
/**
 * Like {@code testPrimaryLeft()}, but the whole cluster is restarted after the update made
 * without the primary; the fresh value and the original affinity order must be restored.
 *
 * @throws Exception If failed.
 */
@Test
public void testPrimaryLeftAndClusterRestart() throws Exception {
startGrids(NODE_COUNT);
IgniteEx ig = grid(0);
ig.cluster().state(ClusterState.ACTIVE);
IgniteCache<Integer, Integer> cache =
ig.createCache(
new CacheConfiguration<Integer, Integer>()
.setName(CACHE_NAME)
.setWriteSynchronizationMode(FULL_SYNC)
.setCacheMode(PARTITIONED)
.setBackups(1)
.setPartitionLossPolicy(READ_ONLY_SAFE)
.setReadFromBackup(true)
.setRebalanceDelay(-1)
);
int key = 1;
List<ClusterNode> affNodes = (List<ClusterNode>)ig.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assert affNodes.size() == 2;
int primaryIdx = -1;
int backupIdx = -1;
IgniteEx primary = null;
IgniteEx backup = null;
manualCacheRebalancing(ig, CACHE_NAME);
// Locate the grids backing the primary and backup copies of the key.
for (int i = 0; i < NODE_COUNT; i++) {
if (grid(i).localNode().equals(affNodes.get(0))) {
primaryIdx = i;
primary = grid(i);
}
else if (grid(i).localNode().equals(affNodes.get(1))) {
backupIdx = i;
backup = grid(i);
}
}
assert primary != null;
assert backup != null;
Integer val1 = 1;
Integer val2 = 2;
cache.put(key, val1);
assertEquals(val1, primary.cache(CACHE_NAME).get(key));
assertEquals(val1, backup.cache(CACHE_NAME).get(key));
// Drive the rest of the test from a node that will survive.
if (ig == primary) {
ig = backup;
cache = ig.cache(CACHE_NAME);
}
// Stop the primary (keeping its persistence) and update the key via the backup.
stopGrid(primaryIdx, false);
backup.context().cache().context().exchange().affinityReadyFuture(new AffinityTopologyVersion(5, 0)).get();
assertEquals(backup.localNode(), ig.affinity(CACHE_NAME).mapKeyToNode(key));
cache.put(key, val2);
assertEquals(val2, backup.cache(CACHE_NAME).get(key));
// Full cluster restart; wait until every node reports the active state again.
stopAllGrids(false);
startGrids(NODE_COUNT);
ig = grid(0);
primary = grid(primaryIdx);
backup = grid(backupIdx);
boolean activated = GridTestUtils.waitForCondition(() -> {
for (int i = 0; i < NODE_COUNT; i++) {
if (!grid(i).cluster().state().active())
return false;
}
return true;
}, 10_000);
assert activated;
// assertEquals(backup.localNode(), ig.affinity(CACHE_NAME).mapKeyToNode(key));
assertEquals(val2, primary.cache(CACHE_NAME).get(key));
assertEquals(val2, backup.cache(CACHE_NAME).get(key));
manualCacheRebalancing(ig, CACHE_NAME);
awaitPartitionMapExchange();
// After rebalance the original primary/backup order must be restored.
affNodes = (List<ClusterNode>)ig.affinity(CACHE_NAME).mapKeyToPrimaryAndBackups(key);
assertEquals(primary.localNode(), affNodes.get(0));
assertEquals(backup.localNode(), affNodes.get(1));
assertEquals(val2, primary.cache(CACHE_NAME).get(key));
assertEquals(val2, backup.cache(CACHE_NAME).get(key));
}
/**
 * Checks that value-type metadata registered while most nodes are down survives a full
 * cluster restart and the values are readable on every node.
 *
 * @throws Exception if failed.
 */
@Test
public void testMetadataUpdate() throws Exception {
startGrids(5);
Ignite ignite3 = grid(3);
ignite3.cluster().state(ClusterState.ACTIVE);
CacheConfiguration<Object, Object> repCacheCfg = new CacheConfiguration<>("replicated")
.setCacheMode(CacheMode.REPLICATED)
.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
IgniteCache<Object, Object> cache = ignite3.getOrCreateCache(repCacheCfg);
// Leave only node 3 running before the TestValue type is first registered by the puts.
stopGrid(0);
stopGrid(1);
stopGrid(2);
stopGrid(4);
for (int i = 0; i < 100; i++)
cache.put(i, new TestValue(i));
stopAllGrids();
startGrids(5);
GridTestUtils.waitForCondition(() -> grid(0).cluster().state().active(), getTestTimeout());
// Every node, including those that were down, must read consistent values.
for (int g = 0; g < 5; g++) {
for (int i = 0; i < 100; i++)
assertEquals(new TestValue(i), grid(g).cache("replicated").get(i));
}
}
/**
 * Checks that a cache created while one baseline node was down is fully restored (all keys
 * readable on all nodes) after a full cluster restart.
 *
 * @throws Exception if failed.
 */
@Test
public void testClusterRestoredOnRestart() throws Exception {
startGrids(5);
Ignite ignite3 = grid(3);
ignite3.cluster().state(ClusterState.ACTIVE);
stopGrid(0);
// The cache is created while node 0 is down, so node 0 first learns of it on restart.
CacheConfiguration<Object, Object> cacheConfiguration = new CacheConfiguration<>("unknown_cache");
cacheConfiguration.setBackups(3);
IgniteCache<Object, Object> cache0 = ignite3.getOrCreateCache(cacheConfiguration);
for (int i = 0; i < 2048; i++)
cache0.put(i, 0);
awaitPartitionMapExchange();
stopAllGrids();
startGrid(1); //TODO https://issues.apache.org/jira/browse/IGNITE-8717 (replace with startGrids(5); //after)
startGrid(0);
startGrid(2);
startGrid(3);
startGrid(4);
GridTestUtils.waitForCondition(() -> grid(0).cluster().state().active(), getTestTimeout());
// Every node must serve every key of the restored cache.
for (int g = 0; g < 5; g++) {
for (int i = 0; i < 2048; i++)
assertEquals("For key: " + i, 0, grid(g).cache("unknown_cache").get(i));
}
}
/**
 * Checks that both persistent and in-memory caches exclude a node outside the baseline from
 * their affinity.
 *
 * @throws Exception If failed.
 */
@Test
public void testNonPersistentCachesDontIgnoreBaselineTopology() throws Exception {
Ignite ig = startGrids(4);
ig.cluster().state(ClusterState.ACTIVE);
IgniteCache persistentCache = ig.createCache(CACHE_NAME);
// In-memory cache placed into the non-persistent "memory" data region.
IgniteCache inMemoryCache = ig.createCache(
new CacheConfiguration<>().setName(CACHE_NAME + 2).setDataRegionName("memory"));
// Node 4 joins after activation and is not part of the baseline.
Ignite newNode = startGrid(4);
awaitPartitionMapExchange();
assertEquals(0, ig.affinity(persistentCache.getName()).allPartitions(newNode.cluster().localNode()).length);
assertEquals(0, ig.affinity(inMemoryCache.getName()).allPartitions(newNode.cluster().localNode()).length);
}
/**
 * Runs checkMapTxNodes with the non-baseline node in the primary role, server-side near node.
 *
 * @throws Exception If failed.
 */
@Test
public void testMapTxPrimaryNodes() throws Exception {
checkMapTxNodes(true, false);
}
/**
 * Runs checkMapTxNodes with the non-baseline node in the backup role, server-side near node.
 *
 * @throws Exception If failed.
 */
@Test
public void testMapTxBackupNodes() throws Exception {
checkMapTxNodes(false, false);
}
/**
 * Runs checkMapTxNodes with the non-baseline node in the primary role as the near node.
 *
 * @throws Exception If failed.
 */
@Test
public void testMapNearTxPrimaryNodes() throws Exception {
checkMapTxNodes(true, true);
}
/**
 * Runs checkMapTxNodes with the non-baseline node in the backup role as the near node.
 *
 * @throws Exception If failed.
 */
@Test
public void testMapNearTxBackupNodes() throws Exception {
checkMapTxNodes(false, true);
}
/**
 * Starts a 3-node baseline plus one non-baseline node and verifies the non-baseline node
 * owns no partitions of either the persistent or the in-memory transactional cache.
 *
 * NOTE(review): the {@code primary} parameter is not referenced in the body -- the assertions
 * do not distinguish primary from backup roles; confirm whether role-specific checks were
 * intended.
 *
 * @param primary Whether non-baseline node is primary.
 * @param near Whether non-baseline node is near node.
 * @throws Exception If failed.
 */
public void checkMapTxNodes(boolean primary, boolean near) throws Exception {
int bltNodesCnt = 3;
Ignite ig = startGrids(bltNodesCnt);
ig.cluster().state(ClusterState.ACTIVE);
// Persistent transactional cache in the default region.
ig.createCache(new CacheConfiguration<>()
.setName(CACHE_NAME)
.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL)
.setBackups(2));
// In-memory transactional cache in the "memory" region.
ig.createCache(
new CacheConfiguration<>()
.setName(CACHE_NAME + 1)
.setDataRegionName("memory")
.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL)
.setBackups(2)
);
// This node joins after activation and therefore stays outside the baseline.
Ignite nonBltIgnite = startGrid(bltNodesCnt);
awaitPartitionMapExchange();
ClusterNode nonBltNode = nonBltIgnite.cluster().localNode();
Ignite nearIgnite = near ? nonBltIgnite : ig;
IgniteCache<Integer, Integer> persistentCache = nearIgnite.cache(CACHE_NAME);
IgniteCache<Integer, Integer> inMemoryCache = nearIgnite.cache(CACHE_NAME + 1);
assertEquals(0, nearIgnite.affinity(persistentCache.getName()).allPartitions(nonBltNode).length);
assertEquals(0, nearIgnite.affinity(inMemoryCache.getName()).allPartitions(nonBltNode).length);
}
/**
 * Restarts the whole cluster with a changed affinity partition permutation and verifies
 * that afterwards: all nodes agree on the new assignment, every key's mapped node owns
 * the key's partition, and all previously written data is still readable.
 *
 * @throws Exception if failed.
 */
@Test
public void testAffinityAssignmentChangedAfterRestart() throws Exception {
    int parts = 32;

    // Identity permutation for the first cluster lifetime.
    final List<Integer> partMapping = new ArrayList<>();

    for (int p = 0; p < parts; p++)
        partMapping.add(p);

    final AffinityFunction affFunc = new TestAffinityFunction(new RendezvousAffinityFunction(false, parts));

    TestAffinityFunction.partsAffMapping = partMapping;

    String cacheName = CACHE_NAME + 2;

    startGrids(4);

    IgniteEx ig = grid(0);

    ig.cluster().state(ClusterState.ACTIVE);

    // Negative rebalance delay disables automatic rebalancing, so partitions stay
    // exactly where the affinity function places them.
    IgniteCache<Integer, Integer> cache = ig.createCache(
        new CacheConfiguration<Integer, Integer>()
            .setName(cacheName)
            .setCacheMode(PARTITIONED)
            .setBackups(1)
            .setPartitionLossPolicy(READ_ONLY_SAFE)
            .setReadFromBackup(true)
            .setWriteSynchronizationMode(FULL_SYNC)
            .setRebalanceDelay(-1)
            .setAffinity(affFunc));

    // Record each key's owner before restart.
    // NOTE(review): the recorded owner (map value) is never compared after the restart —
    // only the key set is iterated below; confirm whether the value is still needed.
    Map<Integer, String> keyToConsId = new HashMap<>();

    for (int k = 0; k < 1000; k++) {
        cache.put(k, k);

        keyToConsId.put(k, ig.affinity(cacheName).mapKeyToNode(k).consistentId().toString());
    }

    stopAllGrids();

    // Change the partition permutation for the next cluster lifetime
    // (fixed seed keeps the test deterministic).
    Collections.shuffle(TestAffinityFunction.partsAffMapping, new Random(1));

    /* There is a problem with handling simultaneous auto activation after restart and manual activation.
    To properly catch the moment when cluster activation has finished we temporary disable auto activation. */
    disableAutoActivation = true;

    startGrids(4);

    ig = grid(0);

    ig.cluster().state(ClusterState.ACTIVE);

    cache = ig.cache(cacheName);

    GridDhtPartitionFullMap partMap = ig.cachex(cacheName).context().topology().partitionMap(false);

    // Every node must report the same primary/backup assignment for every partition.
    for (int i = 1; i < 4; i++) {
        IgniteEx ig0 = grid(i);

        for (int p = 0; p < 32; p++) {
            assertEqualsCollections(
                ig.affinity(cacheName).mapPartitionToPrimaryAndBackups(p),
                ig0.affinity(cacheName).mapPartitionToPrimaryAndBackups(p)
            );
        }
    }

    // For every key, the node affinity maps it to must actually OWN the key's partition.
    for (Map.Entry<Integer, String> e : keyToConsId.entrySet()) {
        int p = ig.affinity(cacheName).partition(e.getKey());

        assertEquals("p=" + p, GridDhtPartitionState.OWNING, partMap.get(ig.affinity(cacheName).mapKeyToNode(e.getKey()).id()).get(p));
    }

    // All pre-restart data must still be readable.
    for (int k = 0; k < 1000; k++)
        assertEquals("k=" + k, Integer.valueOf(k), cache.get(k));
}
/**
 * Verify that in case of setting baseline topology with offline node among others
 * {@link IgniteException} is thrown.
 *
 * @throws Exception If failed.
 */
@Test
@SuppressWarnings({"unchecked", "ThrowableNotThrown"})
public void testSettingBaselineTopologyWithOfflineNode() throws Exception {
    Ignite ignite = startGrids(2);

    ignite.cluster().state(ClusterState.ACTIVE);

    // Cache with a node filter on a custom attribute; the cache itself is incidental —
    // the test only exercises setBaselineTopology validation.
    ignite(0).createCache(defaultCacheConfiguration().setNodeFilter(
        (IgnitePredicate<ClusterNode>)node -> node.attribute("some-attr") != null));

    Collection<ClusterNode> nodes = new ArrayList<>(ignite.cluster().nodes());

    // Add a node that is not part of the live topology.
    nodes.add(new DetachedClusterNode("non-existing-node-id", null));

    // Setting a baseline containing an unknown node must be rejected.
    GridTestUtils.assertThrows(log, (Callable<Void>)() -> {
        ignite.cluster().setBaselineTopology(nodes);

        return null;
    }, IgniteException.class, "Check arguments. Node with consistent ID [non-existing-node-id] " +
        "not found in server nodes.");
}
/**
 * Verify that in case of setting baseline topology with offline node among others {@link IgniteException} is
 * thrown, where the offline node comes from referencing an older topology version.
 *
 * @throws Exception If failed.
 */
@Test
@SuppressWarnings({"unchecked", "ThrowableNotThrown"})
public void testSettingBaselineTopologyWithOfflineNodeFromOldTopology() throws Exception {
    Ignite ignite = startGrids(2);

    ignite.cluster().baselineAutoAdjustEnabled(false);

    ignite.cluster().state(ClusterState.ACTIVE);

    // Node 1 leaves: it is present only in topology versions older than the current one.
    stopGrid(1);

    // Baseline from the current topology version (node 1 already gone) is fine.
    ignite.cluster().setBaselineTopology(ignite.cluster().topologyVersion());

    // Baseline from the previous topology version references the now-offline node
    // and must be rejected.
    GridTestUtils.assertThrows(log, (Callable<Void>)() -> {
        ignite.cluster().setBaselineTopology(ignite.cluster().topologyVersion() - 1);

        return null;
    }, IgniteException.class, "Check arguments. Node with consistent ID " +
        "[distributed.CacheBaselineTopologyTest1] not found in server nodes.");
}
/** Widens a collection of {@link ClusterNode}s to the {@link BaselineNode} collection expected by the baseline API. */
private Collection<BaselineNode> baselineNodes(Collection<ClusterNode> clNodes) {
    return new ArrayList<BaselineNode>(clNodes);
}
/**
 * Simple four-field value object used as a cache payload in these tests.
 * All four fields are initialized to the same seed value.
 */
private static class TestValue {
    /** */
    int f1;

    /** */
    int f2;

    /** */
    int f3;

    /** */
    int f4;

    /**
     * @param a Init value, copied into each of the four fields.
     */
    private TestValue(int a) {
        f1 = a;
        f2 = a;
        f3 = a;
        f4 = a;
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (!(o instanceof TestValue))
            return false;

        TestValue that = (TestValue)o;

        return f1 == that.f1 && f2 == that.f2 && f3 == that.f3 && f4 == that.f4;
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        // Same 31-based polynomial as the original accumulator form.
        return ((f1 * 31 + f2) * 31 + f3) * 31 + f4;
    }
}
/**
 * Affinity function that applies a static partition permutation ({@link #partsAffMapping})
 * on top of the assignment produced by a delegate affinity function.
 */
private static class TestAffinityFunction implements AffinityFunction {
    /** Delegate producing the base partition assignment. */
    private final AffinityFunction delegate;

    /** Permutation: the nodes of base partition {@code p} end up at {@code partsAffMapping.get(p)}. */
    private static List<Integer> partsAffMapping;

    /**
     * @param delegate Delegate affinity function.
     */
    public TestAffinityFunction(AffinityFunction delegate) {
        this.delegate = delegate;
    }

    /** {@inheritDoc} */
    @Override public void reset() {
        delegate.reset();
    }

    /** {@inheritDoc} */
    @Override public int partitions() {
        return delegate.partitions();
    }

    /** {@inheritDoc} */
    @Override public int partition(Object key) {
        return delegate.partition(key);
    }

    /** {@inheritDoc} */
    @Override public List<List<ClusterNode>> assignPartitions(AffinityFunctionContext affCtx) {
        List<List<ClusterNode>> base = delegate.assignPartitions(affCtx);

        int size = base.size();

        // Pre-fill with placeholders, then scatter the base assignment through the permutation.
        List<List<ClusterNode>> remapped = new ArrayList<>(size);

        for (int p = 0; p < size; p++)
            remapped.add(null);

        for (int p = 0; p < size; p++)
            remapped.set(partsAffMapping.get(p), base.get(p));

        return remapped;
    }

    /** {@inheritDoc} */
    @Override public void removeNode(UUID nodeId) {
        delegate.removeNode(nodeId);
    }
}
}
|
apache/incubator-kie-drools | 35,347 | drools-traits/src/main/java/org/drools/traits/core/factmodel/TraitCoreWrapperClassBuilderImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.drools.traits.core.factmodel;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.drools.compiler.builder.impl.classbuilder.BuildUtils;
import org.drools.base.factmodel.ClassDefinition;
import org.drools.base.factmodel.FieldDefinition;
import org.drools.base.factmodel.traits.CoreWrapper;
import org.drools.base.factmodel.traits.Thing;
import org.drools.base.factmodel.traits.TraitConstants;
import org.drools.base.factmodel.traits.TraitFieldTMS;
import org.drools.base.factmodel.traits.Traitable;
import org.drools.mvel.asm.AsmUtil;
import org.kie.api.definition.type.FactField;
import org.mvel2.asm.AnnotationVisitor;
import org.mvel2.asm.ClassWriter;
import org.mvel2.asm.FieldVisitor;
import org.mvel2.asm.Label;
import org.mvel2.asm.MethodVisitor;
import org.mvel2.asm.Opcodes;
import org.mvel2.asm.Type;
import static org.drools.mvel.asm.ClassGenerator.createClassWriter;
public class TraitCoreWrapperClassBuilderImpl implements TraitCoreWrapperClassBuilder, Serializable {
public byte[] buildClass( ClassDefinition core, ClassLoader classLoader ) throws IOException,
SecurityException,
IllegalArgumentException,
ClassNotFoundException,
NoSuchMethodException,
IllegalAccessException,
InvocationTargetException,
InstantiationException,
NoSuchFieldException {
Class coreKlazz = core.getDefinedClass();
String coreName = coreKlazz.getName();
String wrapperName = coreName + "Wrapper";
FieldVisitor fv;
MethodVisitor mv;
ClassWriter cw = createClassWriter( classLoader,
ACC_PUBLIC + ACC_SUPER,
BuildUtils.getInternalType( wrapperName ),
BuildUtils.getTypeDescriptor( coreName ) +
"Lorg/drools/core/factmodel/traits/CoreWrapper<" + BuildUtils.getTypeDescriptor( coreName ) + ">;",
BuildUtils.getInternalType( coreName ),
new String[] { Type.getInternalName( CoreWrapper.class ), Type.getInternalName(Externalizable.class ) } );
{
AnnotationVisitor av0 = cw.visitAnnotation(Type.getDescriptor( Traitable.class ), true );
av0.visit( "logical", core.isFullTraiting() );
}
{
fv = cw.visitField( ACC_PRIVATE, "core", BuildUtils.getTypeDescriptor( coreName ), null, null);
fv.visitEnd();
}
{
fv = cw.visitField(ACC_PRIVATE, TraitConstants.MAP_FIELD_NAME, Type.getDescriptor(Map.class ), "Ljava/util/Map<Ljava/lang/String;Ljava/lang/Object;>;", null );
fv.visitEnd();
}
{
fv = cw.visitField( ACC_PRIVATE, TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ), "Ljava/util/Map<Ljava/lang/String;Lorg/drools/factmodel/traits/Thing;>;", null );
fv.visitEnd();
}
{
fv = cw.visitField(ACC_PRIVATE, TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( TraitFieldTMS.class ), null, null );
fv.visitEnd();
}
{
mv = cw.visitMethod( ACC_PUBLIC, "<init>", "()V", null, null );
mv.visitCode();
try {
coreKlazz.getConstructor();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKESPECIAL, BuildUtils.getInternalType( coreName ), "<init>", "()V" );
} catch ( NoSuchMethodException nsme ) {
Constructor con = coreKlazz.getConstructors()[ 0 ];
Class[] params = con.getParameterTypes();
mv.visitVarInsn( ALOAD, 0 );
for ( Class param : params ) {
mv.visitInsn( AsmUtil.zero( param.getName() ) );
}
mv.visitMethodInsn( INVOKESPECIAL,
BuildUtils.getInternalType( coreName ),
"<init>",
Type.getConstructorDescriptor( con ) );
}
// mv.visitVarInsn( ALOAD, 0 );
// mv.visitTypeInsn( NEW, Type.getInternalName( HashMap.class ) );
// mv.visitInsn( DUP );
// mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( HashMap.class ), "<init>", "()V" );
// mv.visitFieldInsn( PUTFIELD,
// BuildUtils.getInternalType( wrapperName ),
// TraitConstants.MAP_FIELD_NAME,
// Type.getDescriptor( Map.class ) );
// mv.visitVarInsn( ALOAD, 0 );
// mv.visitTypeInsn( NEW, Type.getInternalName( VetoableTypedMap.class ) );
// mv.visitInsn( DUP );
// mv.visitTypeInsn( NEW, Type.getInternalName( HashMap.class ) );
// mv.visitInsn( DUP );
// mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( HashMap.class ), "<init>", "()V" );
// mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( VetoableTypedMap.class ), "<init>", "(" + Type.getDescriptor( Map.class ) + ")V" );
// mv.visitFieldInsn( PUTFIELD,
// BuildUtils.getInternalType( wrapperName ),
// TraitConstants.TRAITSET_FIELD_NAME,
// Type.getDescriptor( Map.class ) );
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
if ( needsMethod( coreKlazz, "getCore" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "getCore", "()" + Type.getDescriptor( Object.class ), "()"+BuildUtils.getTypeDescriptor( coreName ), null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD,
BuildUtils.getInternalType( wrapperName ),
"core",
BuildUtils.getTypeDescriptor( coreName ));
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "_getDynamicProperties" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "_getDynamicProperties", "()" + Type.getDescriptor( Map.class ), "()Ljava/util/Map<Ljava/lang/String;Ljava/lang/Object;>;", null);
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD,
BuildUtils.getInternalType( wrapperName ),
TraitConstants.MAP_FIELD_NAME,
Type.getDescriptor( Map.class ) );
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
mv = cw.visitMethod( ACC_PUBLIC,
"_setDynamicProperties",
"(" + Type.getDescriptor( Map.class ) + ")V",
"(Ljava/util/Map<Ljava/lang/String;Ljava/lang/Object;>;)V",
null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.MAP_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "_getTraitMap" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "_getTraitMap", "()" + Type.getDescriptor( Map.class ),
"()Ljava/util/Map<Ljava/lang/String;Lorg/drools/factmodel/traits/Thing;>;", null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
Label l0 = new Label();
mv.visitJumpInsn( IFNULL, l0 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitFieldInsn( GETSTATIC, Type.getInternalName( Collections.class ), "EMPTY_MAP", Type.getDescriptor( Map.class ) );
Label l1 = new Label();
mv.visitJumpInsn( IF_ACMPNE, l1 );
mv.visitLabel( l0 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitTypeInsn( NEW, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitInsn( DUP );
mv.visitTypeInsn( NEW, Type.getInternalName( HashMap.class ) );
mv.visitInsn( DUP );
mv.visitMethodInsn( INVOKESPECIAL, Type.getInternalName( HashMap.class ), "<init>", "()V" );
mv.visitMethodInsn(INVOKESPECIAL, Type.getInternalName( TraitTypeMapImpl.class ), "<init>", "(" + Type.getDescriptor(Map.class ) + ")V" );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitLabel( l1 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "setTraitMap", Map.class ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "setTraitMap", "(Ljava/util/Map;)V", null, null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitTypeInsn( NEW, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitInsn( DUP );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn(INVOKESPECIAL, Type.getInternalName( TraitTypeMapImpl.class ), "<init>", "(" + Type.getDescriptor(Map.class ) + ")V" );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "addTrait", String.class, Thing.class ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "addTrait",
"(" + Type.getDescriptor( String.class ) + Type.getDescriptor( Thing.class ) + ")V",
"(" + Type.getDescriptor( String.class ) + Type.getDescriptor( Thing.class ) + ")V",
null);
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( wrapperName ), "_getTraitMap", "()" + Type.getDescriptor( Map.class ) );
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitVarInsn( ALOAD, 1 );
mv.visitVarInsn( ALOAD, 2 );
mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName( TraitTypeMapImpl.class ), "putSafe",
"(" + Type.getDescriptor( String.class ) + Type.getDescriptor( Thing.class ) + ")" + Type.getDescriptor( Thing.class ) );
mv.visitInsn( POP );
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "getTrait", String.class ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "getTrait",
"(" + Type.getDescriptor( String.class ) + ")" + Type.getDescriptor( Thing.class ),
"(" + Type.getDescriptor( String.class ) + ")" + Type.getDescriptor( Thing.class ),
null );
mv.visitCode();
mv.visitVarInsn(ALOAD, 0);
mv.visitMethodInsn(INVOKEVIRTUAL, BuildUtils.getInternalType(wrapperName), "_getTraitMap", "()" + Type.getDescriptor(Map.class));
mv.visitVarInsn(ALOAD, 1);
mv.visitMethodInsn(INVOKEINTERFACE, Type.getInternalName(Map.class), "get",
"(" + Type.getDescriptor(Object.class) + ")" + Type.getDescriptor(Object.class));
mv.visitTypeInsn(CHECKCAST, Type.getInternalName(Thing.class));
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "hasTraits" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC,
"hasTraits",
Type.getMethodDescriptor( Type.getType( boolean.class )),
null,
null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn(INVOKEVIRTUAL, BuildUtils.getInternalType(wrapperName), "_getTraitMap", "()" + Type.getDescriptor(Map.class));
Label l5 = new Label();
mv.visitJumpInsn( IFNULL, l5 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn(INVOKEVIRTUAL, BuildUtils.getInternalType(wrapperName), "_getTraitMap", "()" + Type.getDescriptor(Map.class));
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ), "isEmpty", Type.getMethodDescriptor( Type.BOOLEAN_TYPE));
mv.visitJumpInsn( IFNE, l5 );
mv.visitInsn( ICONST_1 );
Label l4 = new Label();
mv.visitJumpInsn( GOTO, l4 );
mv.visitLabel( l5 );
mv.visitInsn( ICONST_0 );
mv.visitLabel( l4 );
mv.visitInsn( IRETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "hasTrait", String.class ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "hasTrait", "(" + Type.getDescriptor( String.class )+ ")Z", null, null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( wrapperName ), "_getTraitMap", "()" + Type.getDescriptor( Map.class ) );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ), "containsKey", "(" + Type.getDescriptor( Object.class ) + ")Z" );
mv.visitInsn( IRETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "removeTrait", String.class ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "removeTrait",
Type.getMethodDescriptor(Type.getType( Collection.class ), Type.getType(String.class)),
Type.getMethodDescriptor(Type.getType( Collection.class ), Type.getType(String.class)),
null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( wrapperName ), "_getTraitMap", Type.getMethodDescriptor( Type.getType( Map.class )));
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitVarInsn(ALOAD, 1);
mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName( TraitTypeMapImpl.class ), "removeCascade",
Type.getMethodDescriptor(Type.getType( Collection.class ), Type.getType(String.class)));
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
mv = cw.visitMethod( ACC_PUBLIC, "removeTrait",
Type.getMethodDescriptor(Type.getType( Collection.class ), Type.getType(BitSet.class)),
Type.getMethodDescriptor(Type.getType( Collection.class ), Type.getType(BitSet.class)),
null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( wrapperName ), "_getTraitMap", Type.getMethodDescriptor( Type.getType( Map.class )));
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitVarInsn(ALOAD, 1);
mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName( TraitTypeMapImpl.class ), "removeCascade",
Type.getMethodDescriptor(Type.getType( Collection.class ), Type.getType(BitSet.class)));
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "getTraits" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "getTraits", "()" + Type.getDescriptor( Collection.class ), "()Ljava/util/Collection<Ljava/lang/String;>;", null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( wrapperName ), "_getTraitMap", "()" + Type.getDescriptor( Map.class ) );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( Map.class ), "keySet", "()" + Type.getDescriptor( Set.class ) );
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "_setBottomTypeCode" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "_setBottomTypeCode", "(" + Type.getDescriptor( BitSet.class )+ ")V", null, null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME , Type.getDescriptor( Map.class ) );
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn(INVOKEVIRTUAL, Type.getInternalName( TraitTypeMapImpl.class ), "setBottomCode", "(" + Type.getDescriptor(BitSet.class ) + ")V");
mv.visitInsn( RETURN );
mv.visitMaxs( 0,0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "getCurrentTypeCode" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC, "getCurrentTypeCode", "()" + Type.getDescriptor( BitSet.class ), null, null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD,
BuildUtils.getInternalType( wrapperName ),
TraitConstants.TRAITSET_FIELD_NAME,
Type.getDescriptor( Map.class ) );
Label l3 = new Label();
mv.visitJumpInsn( IFNONNULL, l3 );
mv.visitInsn( ACONST_NULL );
mv.visitInsn( ARETURN );
mv.visitLabel( l3 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD,
BuildUtils.getInternalType( wrapperName ),
TraitConstants.TRAITSET_FIELD_NAME,
Type.getDescriptor( Map.class ) );
mv.visitTypeInsn( CHECKCAST,
Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitMethodInsn( INVOKEVIRTUAL,
Type.getInternalName( TraitTypeMapImpl.class ),
"getCurrentTypeCode",
"()" + Type.getDescriptor( BitSet.class ) );
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "getMostSpecificTraits" ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC,
"getMostSpecificTraits",
"()" + Type.getDescriptor( Collection.class ),
"()Ljava/util/Collection<Lorg/drools/factmodel/traits/Thing;>;",
null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ),
TraitConstants.TRAITSET_FIELD_NAME ,
Type.getDescriptor( Map.class ) );
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TraitTypeMapImpl.class ) );
mv.visitMethodInsn( INVOKEVIRTUAL,
Type.getInternalName( TraitTypeMapImpl.class ),
"getMostSpecificTraits",
"()" + Type.getDescriptor( Collection.class ) );
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "_getFieldTMS", TraitFieldTMS.class ) ) {
{
mv = cw.visitMethod( Opcodes.ACC_PUBLIC,
"_getFieldTMS",
Type.getMethodDescriptor( Type.getType( TraitFieldTMS.class )),
null,
null);
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( TraitFieldTMS.class ) );
mv.visitInsn( ARETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
if ( needsMethod( coreKlazz, "_setFieldTMS", TraitFieldTMS.class ) ) {
{
mv = cw.visitMethod( ACC_PUBLIC,
"_setFieldTMS",
Type.getMethodDescriptor(Type.VOID_TYPE, Type.getType(TraitFieldTMS.class)),
null,
null);
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitFieldInsn ( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( TraitFieldTMS.class ) );
mv.visitInsn( RETURN) ;
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
{
mv = cw.visitMethod( ACC_PUBLIC, "writeExternal", "(" + Type.getDescriptor( ObjectOutput.class ) + ")V", null, new String[] { Type.getInternalName( IOException.class ) } );
mv.visitCode();
mv.visitVarInsn( ALOAD, 1 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitMethodInsn( INVOKEVIRTUAL, BuildUtils.getInternalType( wrapperName ), "getCore", "()" + Type.getDescriptor( Object.class ) );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V" );
mv.visitVarInsn( ALOAD, 1 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.MAP_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V" );
mv.visitVarInsn( ALOAD, 1 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V" );
if ( core.isFullTraiting() ) {
mv.visitVarInsn( ALOAD, 1 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectOutput.class ), "writeObject", "(" + Type.getDescriptor( Object.class ) + ")V" );
}
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
{
mv = cw.visitMethod( ACC_PUBLIC, "readExternal", "(" + Type.getDescriptor( ObjectInput.class ) + ")V", null,
new String[] { Type.getInternalName( IOException.class ), Type.getInternalName( ClassNotFoundException.class ) } );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ) );
mv.visitTypeInsn( CHECKCAST, BuildUtils.getInternalType( coreName ) );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), "core", BuildUtils.getTypeDescriptor( coreName ) );
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ) );
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( Map.class ) );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.MAP_FIELD_NAME, Type.getDescriptor( Map.class ) );
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ) );
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( Map.class ) );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.TRAITSET_FIELD_NAME, Type.getDescriptor( Map.class ) );
if ( core.isFullTraiting() ) {
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitMethodInsn( INVOKEINTERFACE, Type.getInternalName( ObjectInput.class ), "readObject", "()" + Type.getDescriptor( Object.class ) );
mv.visitTypeInsn( CHECKCAST, Type.getInternalName( TraitFieldTMS.class ) );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( TraitFieldTMS.class ) );
}
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
{
mv = cw.visitMethod( ACC_PUBLIC, "init", "("+ BuildUtils.getTypeDescriptor( coreName ) +")V", null, null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitFieldInsn( PUTFIELD,
BuildUtils.getInternalType( wrapperName ),
"core",
BuildUtils.getTypeDescriptor( coreName ) );
initializeDynamicTypeStructures( mv, wrapperName, core );
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
Method[] ms = coreKlazz.getMethods();
for ( Method method : ms ) {
if ( Modifier.isFinal( method.getModifiers() ) ) {
continue;
}
String signature = TraitFactoryImpl.buildSignature(method );
{
mv = cw.visitMethod( ACC_PUBLIC,
method.getName(),
signature,
null,
null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), "core", BuildUtils.getTypeDescriptor( coreName ) );
Label l0 = new Label();
mv.visitJumpInsn( IFNONNULL, l0 );
if ( method.getReturnType() == void.class ) {
mv.visitInsn( RETURN );
} else {
mv.visitInsn( AsmUtil.zero( method.getReturnType().getName() ) );
mv.visitInsn( AsmUtil.returnType( method.getReturnType().getName() ) );
}
mv.visitLabel( l0 );
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), "core", BuildUtils.getTypeDescriptor( coreName ) );
int j = 1;
for ( Class arg : method.getParameterTypes() ) {
mv.visitVarInsn( AsmUtil.varType( arg.getName() ), j++ );
}
mv.visitMethodInsn( INVOKEVIRTUAL,
BuildUtils.getInternalType( coreName ),
method.getName(),
signature );
mv.visitInsn( AsmUtil.returnType( method.getReturnType().getName() ) );
int stack = TraitFactoryImpl.getStackSize(method );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
}
{
mv = cw.visitMethod( ACC_PUBLIC + ACC_BRIDGE + ACC_SYNTHETIC, "init", "(" + Type.getDescriptor( Object.class ) + ")V", null, null );
mv.visitCode();
mv.visitVarInsn( ALOAD, 0 );
mv.visitVarInsn( ALOAD, 1 );
mv.visitTypeInsn( CHECKCAST, BuildUtils.getInternalType( coreName ) );
mv.visitMethodInsn( INVOKEVIRTUAL,
BuildUtils.getInternalType( wrapperName ),
"init",
"(" + BuildUtils.getTypeDescriptor( coreName ) + ")V" );
mv.visitInsn( RETURN );
mv.visitMaxs( 0, 0 );
mv.visitEnd();
}
cw.visitEnd();
return cw.toByteArray();
}
protected void initializeDynamicTypeStructures( MethodVisitor mv, String wrapperName, ClassDefinition coreDef ) {
if ( coreDef.isFullTraiting() ) {
mv.visitVarInsn( ALOAD, 0 );
mv.visitTypeInsn( NEW, TraitFieldTMS.TYPE_NAME );
mv.visitInsn( DUP );
mv.visitMethodInsn( INVOKESPECIAL, TraitFieldTMS.TYPE_NAME, "<init>", "()V" );
mv.visitFieldInsn( PUTFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( TraitFieldTMS.class ) );
for ( FactField hardField : coreDef.getFields() ) {
FieldDefinition fld = (FieldDefinition) hardField;
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), TraitConstants.FIELDTMS_FIELD_NAME, Type.getDescriptor( TraitFieldTMS.class ) );
mv.visitLdcInsn( Type.getType( Type.getDescriptor( coreDef.getDefinedClass() ) ) );
mv.visitLdcInsn( fld.resolveAlias() );
if ( BuildUtils.isPrimitive( fld.getTypeName() ) ) {
// mv.visitFieldInsn( GETSTATIC, BuildUtils.getInternalType( BuildUtils.box( fld.getTypeName() ) ), "TYPE", Type.getDescriptor( Class.class ) );
mv.visitLdcInsn( Type.getType( BuildUtils.getTypeDescriptor( BuildUtils.box( fld.getTypeName() ) ) ) );
} else {
mv.visitLdcInsn( Type.getType( BuildUtils.getTypeDescriptor( fld.getTypeName() ) ) );
}
mv.visitVarInsn( ALOAD, 0 );
mv.visitFieldInsn( GETFIELD, BuildUtils.getInternalType( wrapperName ), "core", Type.getDescriptor( coreDef.getDefinedClass() ) );
mv.visitMethodInsn( INVOKEVIRTUAL, Type.getInternalName( coreDef.getDefinedClass() ), BuildUtils.getterName( fld.getName(), fld.getTypeName() ), "()" + BuildUtils.getTypeDescriptor( fld.getTypeName() ) );
if ( BuildUtils.isPrimitive( fld.getTypeName() ) ) {
mv.visitMethodInsn( INVOKESTATIC, BuildUtils.getInternalType( BuildUtils.box( fld.getTypeName() ) ), "valueOf", "(" + BuildUtils.getTypeDescriptor( fld.getTypeName() ) + ")" + BuildUtils.getTypeDescriptor( BuildUtils.box( fld.getTypeName() ) ) );
}
if ( fld.getInitExpr() != null ) {
mv.visitLdcInsn( fld.getInitExpr() );
} else {
mv.visitInsn( ACONST_NULL );
}
mv.visitMethodInsn( INVOKEINTERFACE,
Type.getInternalName( TraitFieldTMS.class ),
"registerField",
Type.getMethodDescriptor(Type.VOID_TYPE, Type.getType(Class.class), Type.getType(String.class), Type.getType(Class.class), Type.getType(Object.class), Type.getType(String.class)));
}
}
}
/**
 * Checks whether the trait wrapper must synthesize a method, i.e. whether the core
 * class does NOT already expose a public method with the given signature.
 *
 * @param coreKlazz  the core (wrapped) class to inspect
 * @param methodName name of the method to look up
 * @param args       the method's parameter types
 * @return true if {@code coreKlazz} has no matching public method, false otherwise
 */
protected boolean needsMethod( Class coreKlazz, String methodName, Class... args ) {
    try {
        // Class.getMethod never returns null: it either resolves the method or throws
        // NoSuchMethodException. The previous "== null" comparison was dead code that
        // always evaluated to false on the success path; make the intent explicit.
        coreKlazz.getMethod( methodName, args );
        return false;
    } catch ( NoSuchMethodException e ) {
        return true;
    }
}
}
|
googleapis/google-cloud-java | 35,152 | java-assured-workloads/proto-google-cloud-assured-workloads-v1/src/main/java/com/google/cloud/assuredworkloads/v1/ListWorkloadsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/assuredworkloads/v1/assuredworkloads.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.assuredworkloads.v1;
/**
*
*
* <pre>
* Request for fetching workloads in an organization.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1.ListWorkloadsRequest}
*/
public final class ListWorkloadsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1.ListWorkloadsRequest)
ListWorkloadsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListWorkloadsRequest.newBuilder() to construct.
private ListWorkloadsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListWorkloadsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListWorkloadsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.class,
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoization of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // proto3 message with no required fields: once evaluated, always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 wire format: emit only fields that differ from their defaults
  // (empty string / 0), in ascending field-number order, then any unknown
  // fields preserved from parsing. Generated by protoc - do not hand-edit.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize uses -1 as the "not yet computed" sentinel (inherited field);
  // the size must mirror writeTo() exactly, skipping default-valued fields.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Non-message or foreign types fall back to the superclass comparison.
  if (!(obj instanceof com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest other =
      (com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest) obj;

  // Field-by-field equality over all four declared fields, plus any unknown
  // fields retained from parsing (two messages differing only in unknown
  // fields are NOT equal).
  if (!getParent().equals(other.getParent())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getFilter().equals(other.getFilter())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // memoizedHashCode (inherited) uses 0 as the "not yet computed" sentinel;
  // a message that legitimately hashes to 0 is simply recomputed each call.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Constants 41/19/37/53/29 are protoc's fixed mixing scheme; each field is
  // folded in as (field number, field value) to stay consistent with equals().
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + FILTER_FIELD_NUMBER;
  hash = (53 * hash) + getFilter().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for fetching workloads in an organization.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1.ListWorkloadsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1.ListWorkloadsRequest)
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.class,
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.Builder.class);
}
// Construct using com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
.internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest getDefaultInstanceForType() {
return com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest build() {
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest buildPartial() {
com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest result =
new com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest) {
return mergeFrom((com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest other) {
if (other == com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent Resource to list workloads from.
* Must be of the form `organizations/{org_id}/locations/{location}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Page size.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Page token returned from previous request. Page token contains context from
* previous request. Page token needs to be passed in the second and following
* requests.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A custom filter for filtering by properties of a workload. At this time,
* only filtering by labels is supported.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1.ListWorkloadsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1.ListWorkloadsRequest)
private static final com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest();
}
public static com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListWorkloadsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListWorkloadsRequest>() {
@java.lang.Override
public ListWorkloadsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListWorkloadsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListWorkloadsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.ListWorkloadsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/systemds | 34,002 | src/main/java/org/apache/sysds/runtime/frame/data/columns/Array.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.frame.data.columns;
import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Writable;
import org.apache.sysds.common.Types.ValueType;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.compress.colgroup.mapping.AMapToData;
import org.apache.sysds.runtime.compress.estim.sample.SampleEstimatorFactory;
import org.apache.sysds.runtime.frame.data.columns.ArrayFactory.FrameArrayType;
import org.apache.sysds.runtime.frame.data.compress.ArrayCompressionStatistics;
import org.apache.sysds.runtime.matrix.data.Pair;
import org.apache.sysds.utils.stats.Timing;
/**
* Generic, resizable native arrays for the internal representation of the columns in the FrameBlock. We use this custom
* class hierarchy instead of Trove or other libraries in order to avoid unnecessary dependencies.
*/
public abstract class Array<T> implements Writable {
protected static final Log LOG = LogFactory.getLog(Array.class.getName());
/** Parallelization threshold for parallelizing vector operations */
public static int ROW_PARALLELIZATION_THRESHOLD = 10000;
/** A soft reference to a memorization of this arrays mapping, used in transformEncode */
protected SoftReference<Map<T, Integer>> _rcdMapCache = null;
/** The current allocated number of elements in this Array */
protected int _size;
protected Array(int size) {
_size = size;
if(size <= 0)
throw new DMLRuntimeException("Invalid zero/negative size of Array");
}
protected int newSize() {
return Math.max(_size * 2, 4);
}
	/**
	 * Get the current cached recode map.
	 *
	 * @return The cached recode map, or null if nothing was cached yet (or the soft reference itself is null)
	 */
	public final SoftReference<Map<T, Integer>> getCache() {
		return _rcdMapCache;
	}
	/**
	 * Set the cached hashmap cache of this Array allocation, to be used in transformEncode. Overwrites any previously
	 * cached map.
	 *
	 * @param m The element to cache.
	 */
	public final void setCache(SoftReference<Map<T, Integer>> m) {
		_rcdMapCache = m;
	}
	/**
	 * Get a recode map that maps each unique value in the array, to a long ID. Null values are ignored, and not included
	 * in the mapping. The resulting recode map in stored in a soft reference to speed up repeated calls to the same
	 * column. Uses a default unique-value estimate of 4 for the initial hashmap sizing.
	 *
	 * @return A recode map
	 */
	public synchronized final Map<T, Integer> getRecodeMap() {
		return getRecodeMap(4);
	}
/**
* Get a recode map that maps each unique value in the array, to a long ID. Null values are ignored, and not included
* in the mapping. The resulting recode map in stored in a soft reference to speed up repeated calls to the same
* column.
*
* @param estimate The estimated number of unique values in this Array to start the initial hashmap size at
* @return A recode map
*/
public synchronized final Map<T, Integer> getRecodeMap(int estimate) {
try {
return getRecodeMap(estimate, null, -1);
}
catch(Exception e) {
throw new RuntimeException(e);
}
}
	/**
	 * Get a recode map that maps each unique value in the array, to a long ID. Null values are ignored, and not included
	 * in the mapping. The resulting recode map in stored in a soft reference to speed up repeated calls to the same
	 * column. The method is synchronized so concurrent callers share one construction of the map.
	 *
	 * @param estimate the estimated number of unique values in this array.
	 * @param pool An executor pool to be used for parallel execution (Note this method does not shutdown the pool)
	 * @param k Parallelization degree allowed
	 * @return A recode map
	 * @throws ExecutionException if the parallel execution fails
	 * @throws InterruptedException if the parallel execution fails
	 */
	public synchronized final Map<T, Integer> getRecodeMap(int estimate, ExecutorService pool, int k)
		throws InterruptedException, ExecutionException {
		// probe cache for existing map (soft reference may have been cleared by GC)
		Map<T, Integer> map;
		SoftReference<Map<T, Integer>> tmp = getCache();
		map = (tmp != null) ? tmp.get() : null;
		if(map != null)
			return map;
		// construct recode map
		map = createRecodeMap(estimate, pool, k);
		// put created map into cache
		setCache(new SoftReference<>(map));
		return map;
	}
	/**
	 * Create a recode map that maps each unique value in the array, to a long ID. Null values are ignored, and not
	 * included in the mapping.
	 *
	 * @param estimate The estimate number of unique values inside this array.
	 * @param pool The thread pool to use for parallel creation of recode map (can be null). (Note this method does
	 *             not shutdown the pool)
	 * @param k The allowed degree of parallelism
	 * @return The recode map created.
	 * @throws ExecutionException if the parallel execution fails
	 * @throws InterruptedException if the parallel execution fails
	 */
	protected HashMapToInt<T> createRecodeMap(int estimate, ExecutorService pool, int k)
		throws InterruptedException, ExecutionException {
		final boolean debug = LOG.isDebugEnabled();
		final Timing t = debug ? new Timing() : null;
		final int s = size();
		final HashMapToInt<T> ret;
		// fall back to the sequential path when parallelism is disabled or the column is small.
		if(k <= 1 || pool == null || s < ROW_PARALLELIZATION_THRESHOLD)
			ret = createRecodeMap(estimate, 0, s);
		else
			ret = parallelCreateRecodeMap(estimate, pool, s, k);
		if(debug) {
			String base = "CreateRecodeMap estimate: %10d actual %10d time: %10.5f";
			LOG.debug(String.format(base, estimate, ret.size(), t.stop()));
		}
		return ret;
	}
	/**
	 * Create the recode map in parallel: each worker builds a partial map over its own row block, and the partial maps
	 * are merged into the map of the first block (built on the calling thread).
	 *
	 * @param estimate The estimated number of unique values, used to presize each partial map
	 * @param pool     The executor to submit partial-map tasks to (not shut down here)
	 * @param s        The total number of rows
	 * @param k        The requested degree of parallelism
	 * @return The merged recode map
	 * @throws InterruptedException if the parallel execution fails
	 * @throws ExecutionException   if the parallel execution fails
	 */
	private HashMapToInt<T> parallelCreateRecodeMap(int estimate, ExecutorService pool, final int s, int k)
		throws InterruptedException, ExecutionException {
		// block size: at least half the parallelization threshold, and at most k blocks in total.
		final int blk = Math.max(ROW_PARALLELIZATION_THRESHOLD / 2, (s + k) / k);
		final List<Future<HashMapToInt<T>>> tasks = new ArrayList<>();
		for(int i = blk; i < s; i += blk) { // start at blk for the other threads
			final int start = i;
			final int end = Math.min(i + blk, s);
			tasks.add(pool.submit(() -> createRecodeMap(estimate, start, end)));
		}
		// make the initial map thread local allocation.
		final HashMapToInt<T> map = new HashMapToInt<T>((int) (estimate * 1.3));
		createRecodeMap(map, 0, blk);
		for(int i = 0; i < tasks.size(); i++) { // merge with other threads work.
			final HashMapToInt<T> map2 = tasks.get(i).get();
			mergeRecodeMaps(map, map2);
		}
		return map;
	}
/**
* Merge Recode maps, most likely from parallel threads.
*
* If the unique value is present in the target, use that ID, otherwise this method map to new ID's based on the
* target mapping's size.
*
* @param target The target object to merge the two maps into
* @param from The Map to take entries from.
*/
protected static <T> void mergeRecodeMaps(HashMapToInt<T> target, HashMapToInt<T> from) {
final List<T> fromEntriesOrdered = new ArrayList<>(Collections.nCopies(from.size(), null));
from.forEach((k, v) -> {
fromEntriesOrdered.set(v - 1, k);
});
int id = target.size();
for(T e : fromEntriesOrdered) {
if(target.putIfAbsentI(e, id) == -1)
id++;
}
}
	/**
	 * Sequentially create a recode map for the index range [s, e).
	 *
	 * @param estimate The estimated number of unique values (capped at the range length)
	 * @param s        Start row (inclusive)
	 * @param e        End row (exclusive)
	 * @return The created recode map
	 */
	protected HashMapToInt<T> createRecodeMap(final int estimate, final int s, final int e) {
		// * 1.3 because the hash map has a load factor of 0.75 (1 / 0.75 ~= 1.33), avoiding an early resize.
		final HashMapToInt<T> map = new HashMapToInt<>((int) (Math.min((long) estimate, (e - s)) * 1.3));
		return createRecodeMap(map, s, e);
	}
protected HashMapToInt<T> createRecodeMap(HashMapToInt<T> map, final int s, final int e) {
int id = 1;
for(int i = s; i < e; i++)
id = addValRecodeMap(map, id, i);
return map;
}
	/**
	 * Add the value at row i to the recode map, if it is non-null and not yet present.
	 *
	 * @param map The recode map to insert into
	 * @param id  The next free ID to assign
	 * @param i   The row index to read the value from
	 * @return The next free ID after this insertion (id + 1 if a new entry was added, otherwise id unchanged)
	 */
	protected int addValRecodeMap(HashMapToInt<T> map, int id, int i) {
		final T val = getInternal(i);
		// putIfAbsentI returns -1 when the value was newly inserted; nulls are skipped entirely.
		if(val != null && map.putIfAbsentI(val, id) == -1)
			id++;
		return id;
	}
	/**
	 * Get the number of elements in the array, this does not necessarily reflect the current allocated size. Always
	 * strictly positive (enforced by the constructor).
	 *
	 * @return the current number of elements
	 */
	public final int size() {
		return _size;
	}
/**
* Get the value at a given index.
*
* This method returns objects that have a high overhead in allocation. Therefore it is not as efficient as using the
* vectorized operations specified in the object.
*
* @param index The index to query
* @return The value returned as an object
*/
public abstract T get(int index);
/**
* Get the internal value at a given index. For instance HashIntegerArray would return the underlying long not a
* string.
*
* @param index the index to get
* @return The value to get
*/
public T getInternal(int index) {
return get(index);
}
/**
* Get the underlying array out of the column Group,
*
* it is the responsibility of the caller to know what type it is.
*
* Also it is not guaranteed that the underlying data structure does not allocate an appropriate response to the
* caller. This in practice means that if called there is a possibility that the entire array is allocated again. So
* the method should only be used for debugging purposes not for performance.
*
* @return The underlying array.
*/
public abstract Object get();
/**
* Get the index's value.
*
* returns 0 in case of Null.
*
* @param i index to get value from
* @return the value
*/
public abstract double getAsDouble(int i);
/**
* Get the index's value.
*
* returns Double.NaN in case of Null.
*
* @param i index to get value from
* @return the value
*/
public double getAsNaNDouble(int i) {
return getAsDouble(i);
}
/**
* Set index to the given value of same type
*
* @param index The index to set
* @param value The value to assign
*/
public abstract void set(int index, T value);
/**
* Set index to given double value (cast to the correct type of this array)
*
* @param index the index to set
* @param value the value to set it to (before casting to correct value type)
*/
public abstract void set(int index, double value);
/**
* Set index to the given value of the string parsed.
*
* @param index The index to set
* @param value The value to assign
*/
public abstract void set(int index, String value);
/**
* Set range to given arrays value
*
* @param rl row lower
* @param ru row upper (inclusive)
* @param value value array to take values from (other type)
*/
public abstract void setFromOtherType(int rl, int ru, Array<?> value);
/**
* Set range to given arrays value
*
* @param rl row lower
* @param ru row upper (inclusive)
* @param value value array to take values from (same type) offset by rl.
*/
public final void set(int rl, int ru, Array<T> value) {
set(rl, ru, value, 0);
}
/**
* Set range to given arrays value with an offset into other array
*
* @param rl row lower
* @param ru row upper (inclusive)
* @param value value array to take values from
* @param rlSrc the offset into the value array to take values from
*/
public void set(int rl, int ru, Array<T> value, int rlSrc) {
for(int i = rl, off = rlSrc; i <= ru; i++, off++)
set(i, value.getInternal(off));
}
/**
* Set non default values from the value array given
*
* @param value array of same type and length
*/
public final void setNz(Array<T> value) {
setNz(0, value.size() - 1, value);
}
/**
* Set non default values in the range from the value array given
*
* @param rl row start
* @param ru row upper inclusive
* @param value value array of same type
*/
public abstract void setNz(int rl, int ru, Array<T> value);
/**
* Set non default values from the value array given
*
* @param value array of other type
*/
public final void setFromOtherTypeNz(Array<?> value) {
setFromOtherTypeNz(0, value.size() - 1, value);
}
/**
* Set non default values in the range from the value array given
*
* @param rl row start
* @param ru row end inclusive
* @param value value array of different type
*/
public abstract void setFromOtherTypeNz(int rl, int ru, Array<?> value);
/**
* Append a string value to the current Array, this should in general be avoided, and appending larger blocks at a
* time should be preferred.
*
* @param value The value to append
*/
public abstract void append(String value);
/**
* Append a value of the same type of the Array. This should in general be avoided, and appending larger blocks at a
* time should be preferred.
*
* @param value The value to append
*/
public abstract void append(T value);
/**
* Append other array, if the other array is fitting in current allocated size use that allocated size, otherwise
* allocate new array to combine the other with this.
*
* This method should use the set range function, and should be preferred over the append single values.
*
* @param other The other array of same type to append to this.
* @return The combined arrays.
*/
public abstract Array<T> append(Array<T> other);
/**
* Slice out the sub range and return new array with the specified type.
*
* If the conversion fails fallback to normal slice.
*
* @param rl row start
* @param ru row end (not included)
* @return A new array of sub range.
*/
public abstract Array<T> slice(int rl, int ru);
/**
* Reset the Array and set to a different size. This method is used to reuse an already allocated Array, without
* extra allocation. It should only be done in cases where the Array is no longer in use in any FrameBlocks.
*
* @param size The size to reallocate into.
*/
public abstract void reset(int size);
/**
* Return the current allocated Array as a byte[], this is used to serialize the allocated Arrays out to the
* PythonAPI.
*
* @return The array as bytes
*/
public abstract byte[] getAsByteArray();
/**
* Get the current value type of this array.
*
* @return The current value type.
*/
public abstract ValueType getValueType();
/**
* Analyze the column to figure out if the value type can be refined to a better type. The return is in two parts,
* first the type it can be, second if it contains nulls.
*
* @return A better or equivalent value type to represent the column, including null information.
*/
public final Pair<ValueType, Boolean> analyzeValueType() {
return analyzeValueType(size());
}
/**
* Analyze the column to figure out if the value type can be refined to a better type. The return is in two parts,
* first the type it can be, second if it contains nulls.
*
* @param maxCells maximum number of cells to analyze
* @return A better or equivalent value type to represent the column, including null information.
*/
public abstract Pair<ValueType, Boolean> analyzeValueType(int maxCells);
/**
* Get the internal FrameArrayType, to specify the encoding of the Types, note there are more Frame Array Types than
* there is ValueTypes.
*
* @return The FrameArrayType
*/
public abstract FrameArrayType getFrameArrayType();
/**
* Get in memory size, not counting reference to this object.
*
* @return the size in memory of this object.
*/
public long getInMemorySize() {
return baseMemoryCost();
}
/**
* Get the base memory cost of the Arrays allocation.
*
* @return The base memory cost
*/
public static long baseMemoryCost() {
// Object header , int size, padding, softref.
return 16 + 4 + 4 + 8;
}
/**
* Get the exact serialized size on disk of this array.
*
* @return The exact size on disk
*/
public abstract long getExactSerializedSize();
public ABooleanArray getNulls() {
return null;
}
/**
* analyze if the array contains null values.
*
* @return If the array contains null.
*/
public boolean containsNull() {
return false;
}
public abstract boolean possiblyContainsNaN();
/**
* Change type taking into consideration if the target type must be able to contain Null.
*
* @param t The target type
* @param containsNull If the target should be able to contain null
* @return The changed type array.
*/
public Array<?> changeType(ValueType t, boolean containsNull) {
return containsNull ? changeTypeWithNulls(t) : changeType(t);
}
	/**
	 * Change to the given value type, preserving null entries via an optional (null-tracking) target array.
	 *
	 * Note: if the target type equals the current type, this very instance is returned (no copy). If this array tracks
	 * no nulls, or the target is STRING (which can represent null directly), a plain type change is performed.
	 *
	 * @param t The target value type
	 * @return The converted array (possibly this instance)
	 */
	public Array<?> changeTypeWithNulls(ValueType t) {
		if(t == getValueType())
			return this;
		final ABooleanArray nulls = getNulls();
		if(nulls == null || t == ValueType.STRING) // String can contain null.
			return changeType(t);
		return changeTypeWithNulls(ArrayFactory.allocateOptional(t, size()));
	}
public final Array<?> changeTypeWithNulls(Array<?> ret) {
return changeTypeWithNulls(ret, 0, ret.size());
}
public final Array<?> changeTypeWithNulls(Array<?> ret, int l, int u) {
if(ret instanceof OptionalArray)
return changeTypeWithNulls((OptionalArray<?>) ret, l, u);
else
return changeType(ret, l, u);
}
	/**
	 * Change type into an OptionalArray target: first transfer the null positions into the target's null vector, then
	 * convert the values into the target's value array.
	 *
	 * @param ret The optional target array (holds a value array _a and a null vector _n)
	 * @param l   Lower index (inclusive)
	 * @param u   Upper index (exclusive)
	 * @return The target array given as argument
	 */
	@SuppressWarnings("unchecked")
	private OptionalArray<?> changeTypeWithNulls(OptionalArray<?> ret, int l, int u) {
		if(this.getValueType() == ValueType.STRING)
			// string arrays carry nulls in the values themselves, so derive the null vector from the strings.
			ret._n.setNullsFromString(l, u, (Array<String>) this);
		else
			// set(..) takes an inclusive upper bound, hence u - 1.
			ret._n.set(l, u - 1, getNulls());
		changeType(ret._a, l, u);
		return ret;
	}
	/**
	 * Change the allocated array to a different type.
	 *
	 * NOTE(review): despite the earlier documentation claiming "a deep copy is returned for safety", when the target
	 * type equals the current type this method returns this very instance (no copy) -- callers must not assume
	 * ownership of a fresh allocation.
	 *
	 * @param t The type to change to
	 * @return A new column array, or this instance if the value type is unchanged.
	 */
	public Array<?> changeType(ValueType t) {
		if(t == getValueType())
			return this;
		else
			return changeType(ArrayFactory.allocate(t, size()));
	}
/**
* Change type by moving this arrays value into the given ret array.
*
* @param ret The Array to put this arrays values into
* @return The ret array given
*/
public final Array<?> changeType(Array<?> ret) {
return changeType(ret, 0, ret.size());
}
	/**
	 * Put the changed value types into the given ret array inside the range specified. Dispatches on the TARGET array's
	 * value type to the matching type-specific conversion.
	 *
	 * @param ret The Array to put this arrays values into
	 * @param rl  inclusive lower bound
	 * @param ru  exclusive upper bound
	 * @return The ret array given.
	 */
	@SuppressWarnings("unchecked")
	public final Array<?> changeType(Array<?> ret, int rl, int ru) {
		switch(ret.getValueType()) {
			case BOOLEAN:
				// booleans have two physical layouts: a packed BitSet (possibly wrapped in an OptionalArray)
				// and a plain boolean array; pick the matching conversion.
				if(ret instanceof BitSetArray || //
					(ret instanceof OptionalArray && ((OptionalArray<?>) ret)._a instanceof BitSetArray))
					return changeTypeBitSet((Array<Boolean>) ret, rl, ru);
				else
					return changeTypeBoolean((Array<Boolean>) ret, rl, ru);
			case FP32:
				return changeTypeFloat((Array<Float>) ret, rl, ru);
			case FP64:
				return changeTypeDouble((Array<Double>) ret, rl, ru);
			case UINT4:
			case UINT8:
			case INT32:
				// all small integer types share the Integer-backed conversion.
				return changeTypeInteger((Array<Integer>) ret, rl, ru);
			case HASH32:
				return changeTypeHash32((Array<Object>) ret, rl, ru);
			case HASH64:
				return changeTypeHash64((Array<Object>) ret, rl, ru);
			case INT64:
				return changeTypeLong((Array<Long>) ret, rl, ru);
			case CHARACTER:
				return changeTypeCharacter((Array<Character>) ret, rl, ru);
			case UNKNOWN:
			case STRING:
			default:
				// UNKNOWN falls back to the STRING conversion, which can represent anything (including null).
				return changeTypeString((Array<String>) ret, rl, ru);
		}
	}
/**
* Change type to a bitSet, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Boolean type of array that is pointing the ret argument
*/
protected abstract Array<Boolean> changeTypeBitSet(Array<Boolean> ret, int l, int u);
/**
* Change type to a boolean array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Boolean type of array that is pointing the ret argument
*/
protected abstract Array<Boolean> changeTypeBoolean(Array<Boolean> ret, int l, int u);
/**
* Change type to a Double array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Double type of array that is pointing the ret argument
*/
protected abstract Array<Double> changeTypeDouble(Array<Double> ret, int l, int u);
/**
* Change type to a Float array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Float type of array that is pointing the ret argument
*/
protected abstract Array<Float> changeTypeFloat(Array<Float> ret, int l, int u);
/**
* Change type to a Integer array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Integer type of array that is pointing the ret argument
*/
protected abstract Array<Integer> changeTypeInteger(Array<Integer> ret, int l, int u);
/**
* Change type to a Long array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Long type of array that is pointing the ret argument
*/
protected abstract Array<Long> changeTypeLong(Array<Long> ret, int l, int u);
/**
* Change type to a Hash64 array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Hash64 type of array that is pointing the ret argument
*/
protected abstract Array<Object> changeTypeHash64(Array<Object> ret, int l, int u);
/**
* Change type to a Hash32 array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Hash64 type of array that is pointing the ret argument
*/
protected abstract Array<Object> changeTypeHash32(Array<Object> ret, int l, int u);
/**
* Change type to a String array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A String type of array that is pointing the ret argument
*/
protected abstract Array<String> changeTypeString(Array<String> ret, int l, int u);
/**
* Change type to a Character array, of underlying longs to store the individual values
*
* @param ret The array to insert the result into
* @param l lower index to convert from (inclusive)
* @param u upper index to convert to (exclusive)
* @return A Character type of array that is pointing the ret argument
*/
protected abstract Array<Character> changeTypeCharacter(Array<Character> ret, int l, int u);
/**
* Get the minimum and maximum length of the contained values as string type.
*
* @return A Pair of first the minimum length, second the maximum length
*/
public Pair<Integer, Integer> getMinMaxLength() {
throw new DMLRuntimeException("Length is only relevant if case is String");
}
/**
* fill the entire array with specific value.
*
* @param val the value to fill with.
*/
public abstract void fill(String val);
/**
* fill the entire array with specific value.
*
* @param val the value to fill with.
*/
public abstract void fill(T val);
/**
* analyze if this array can be shallow serialized. to allow caching without modification.
*
* @return boolean saying true if shallow serialization is available
*/
public abstract boolean isShallowSerialize();
/**
* Get if this array is empty, aka filled with empty values.
*
* @return boolean saying true if empty
*/
public abstract boolean isEmpty();
/**
* Slice out the specified indices and return the sub array.
*
* @param indices The indices to slice out
* @return the sliced out indices in an array format
*/
public abstract Array<T> select(int[] indices);
/**
* Slice out the true indices in the select input and return the sub array.
*
* @param select a boolean vector specifying what to select
* @param nTrue number of true values inside select
* @return the sliced out indices in an array format
*/
public abstract Array<T> select(boolean[] select, int nTrue);
/**
* Find the empty rows, it is assumed that the input is to be only modified to set variables to true.
*
* @param select Modify this to true in indexes that are not empty.
*/
public final void findEmpty(boolean[] select) {
for(int i = 0; i < select.length; i++)
if(isNotEmpty(i))
select[i] = true;
}
public abstract boolean isNotEmpty(int i);
/**
* Find the filled rows, it is assumed that the input i to be only modified to set variables to true;
*
* @param select modify this to true in indexes that are empty.
*/
public void findEmptyInverse(boolean[] select) {
for(int i = 0; i < select.length; i++)
if(!isNotEmpty(i))
select[i] = true;
}
/**
* Overwrite of the java internal clone function for arrays, return a clone of underlying data that is mutable, (not
* immutable data.)
*
* Immutable data is dependent on the individual allocated arrays
*
* @return A clone
*/
@Override
public abstract Array<T> clone();
@Override
public String toString() {
return this.getClass().getSimpleName();
}
/**
* Hash the given index of the array. It is allowed to return NaN on null elements.
*
* @param idx The index to hash
* @return The hash value of that index.
*/
public abstract double hashDouble(int idx);
public ArrayIterator getIterator() {
return new ArrayIterator();
}
	// Two arrays are equal when both are Array instances of the same value type and the
	// type-specific equals(Array<T>) agrees. The unchecked cast is safe after the value-type check.
	// NOTE(review): no matching hashCode() override is visible in this class -- verify the
	// equals/hashCode contract before using Array instances as hash keys.
	@Override
	@SuppressWarnings("unchecked")
	public boolean equals(Object other) {
		return other instanceof Array && //
			((Array<?>) other).getValueType() == this.getValueType() && //
			this.equals((Array<T>) other);
	}
/**
* Extract the sub array into the ret array as doubles.
*
* The ret array is filled from - rl, meaning that the ret array should be of length ru - rl.
*
* @param ret The array to return
* @param rl The row to start at
* @param ru The row to end at (not inclusive.)
* @return The ret array given as argument
*/
public double[] extractDouble(double[] ret, int rl, int ru) {
for(int i = rl; i < ru; i++)
ret[i - rl] = getAsDouble(i);
return ret;
}
/**
* Equals operation on arrays.
*
* @param other The other array to compare to.
* @return True if the arrays are equivalent.
*/
public abstract boolean equals(Array<T> other);
	/**
	 * Estimate the in-memory size of a single element for the given value type. Fixed-width types return their natural
	 * width; variable-width types (STRING/UNKNOWN) derive an average from the total memory size divided by the row
	 * count (size() is strictly positive, so no division by zero).
	 *
	 * @param vt      The value type to size
	 * @param memSize The total in-memory size, used only for the variable-width fallback
	 * @return The estimated bytes per element
	 */
	protected int estMemSizePerElement(ValueType vt, long memSize) {
		final int memSizePerElement;
		switch(vt) {
			case UINT4:
			case UINT8:
			case INT32:
			case HASH32:
			case FP32:
				memSizePerElement = 4;
				break;
			case INT64:
			case FP64:
			case HASH64:
				memSizePerElement = 8;
				break;
			case CHARACTER:
				memSizePerElement = 2;
				break;
			case BOOLEAN:
				memSizePerElement = 1;
				break;
			case UNKNOWN:
			case STRING:
			default:
				// variable width: average bytes per row over the whole column.
				memSizePerElement = (int) (memSize / size());
		}
		return memSizePerElement;
	}
/**
* Get the compression statistics of this array allocation.
*
* @param nSamples The number of sample elements suggested (not forced) to be used.
* @return The compression statistics of this array.
*/
public ArrayCompressionStatistics statistics(int nSamples) {
Pair<ValueType, Boolean> vt = analyzeValueType(nSamples);
if(vt.getKey() == ValueType.UNKNOWN)
vt = analyzeValueType(); // full analysis if unknown
if(vt.getKey() == ValueType.UNKNOWN)
vt = new Pair<>(ValueType.STRING, false); // if still unknown String.
// memory size is different depending on valuetype.
final long memSize = vt.getKey() != getValueType() ? //
ArrayFactory.getInMemorySize(vt.getKey(), size(), containsNull()) : //
getInMemorySize(); // uncompressed size
final int memSizePerElement = estMemSizePerElement(vt.getKey(), memSize);
final int estDistinct = estimateDistinct(nSamples);
long ddcSize = DDCArray.estimateInMemorySize(memSizePerElement, estDistinct, size());
final boolean sampledAllRows = nSamples == size();
if(ddcSize < memSize)
return new ArrayCompressionStatistics(memSizePerElement, //
estDistinct, true, vt.getKey(), vt.getValue(), FrameArrayType.DDC, getInMemorySize(), ddcSize,
sampledAllRows);
else if(vt.getKey() != getValueType())
return new ArrayCompressionStatistics(memSizePerElement, //
estDistinct, false, vt.getKey(), vt.getValue(), null, getInMemorySize(), memSize, sampledAllRows);
else // do not compress based on dictionary size.
return new ArrayCompressionStatistics(memSizePerElement, //
estDistinct, false, vt.getKey(), vt.getValue(), null, getInMemorySize(), memSize, sampledAllRows);
}
protected int estimateDistinct(int nSamples) {
final HashMap<T, Integer> d = new HashMap<>(Math.min(nSamples / 10, 1024));
// final ACountHashMap<T> d = new CountHashMap<T>(nSamples / 10);
int nSamplesTaken = 0;
for(; nSamplesTaken < nSamples && !earlyAbortEstimateDistinct(d.size(), nSamplesTaken, nSamples);
nSamplesTaken++) {
// d.get(d);
T key = get(nSamplesTaken);
if(d.containsKey(key))
d.put(key, d.get(key) + 1);
else
d.put(key, 1);
}
if(earlyAbortEstimateDistinct(d.size(), nSamplesTaken, nSamples)) {
LOG.warn("Early abort stats and compress : " + nSamplesTaken + " " + nSamples);
return size();
}
final int[] freq = new int[d.size()];
int id = 0;
for(Integer e : d.values())
freq[id++] = e;
return SampleEstimatorFactory.distinctCount(freq, size(), nSamplesTaken);
}
	/**
	 * Decide if the distinct-count sampling should abort early: once at least 10% of the requested samples are taken,
	 * abort when more than 60% of the sampled values are distinct.
	 *
	 * @param distinctFound Number of distinct values found so far
	 * @param samplesTaken  Number of samples taken so far
	 * @param maxSamples    The total number of samples requested
	 * @return true if sampling should be aborted
	 */
	protected boolean earlyAbortEstimateDistinct(int distinctFound, int samplesTaken, int maxSamples) {
		return samplesTaken * 100 >= maxSamples * 10 // More than 10 % sampled.
			&& distinctFound * 100 >= samplesTaken * 60; // More than 60 % distinct
	}
	/**
	 * Look up (or insert) the value at row i in the recode dictionary and write its ID into the mapping m.
	 *
	 * @param rcd The recode dictionary to probe/extend
	 * @param m   The mapping to write the ID into at position i
	 * @param i   The row index to process
	 * @param id  The next free ID to assign on a miss
	 * @return The next free ID after this row (id + 1 on a miss, id unchanged on a hit)
	 */
	protected int setAndAddToDict(HashMapToInt<T> rcd, AMapToData m, int i, int id) {
		final T val = getInternal(i);
		// putIfAbsentI returns -1 if val was newly inserted, otherwise the existing ID.
		final int v = rcd.putIfAbsentI(val, id);
		if(v == -1) {
			m.set(i, id);
			id++;
		}
		else
			m.set(i, v);
		return id;
	}
public class ArrayIterator implements Iterator<T> {
int index = -1;
public int getIndex() {
return index;
}
@Override
public boolean hasNext() {
return index < size() - 1;
}
@Override
public T next() {
return get(++index);
}
}
/**
* Get the minimum and maximum double value of this array.
*
* Note that we ignore NaN Values.
*
* @return The min and max in index 0 and 1 of the array.
*/
public double[] minMax() {
return minMax(0, size());
}
/**
* Get the minimum and maximum double value of a specific sub part of this array.
*
* Note that we ignore NaN Values.
*
* @param l The lower index to search from
* @param u The upper index to end at (not inclusive)
* @return The min and max in index 0 and 1 of the array in the range.
*/
public double[] minMax(int l, int u) {
double min = Double.POSITIVE_INFINITY;
double max = Double.NEGATIVE_INFINITY;
for(int i = l; i < u; i++) {
final double inVal = getAsDouble(i);
if(!Double.isNaN(inVal)) {
min = Math.min(min, inVal);
max = Math.max(max, inVal);
}
}
return new double[] {min, max};
}
/**
* Set the index i in the map given based on the mapping provided. The map should be guaranteed to contain all unique
* values.
*
* @param map A map containing all unique values of this array
* @param m The MapToData to set the value part of the Map from
* @param i The index to set in m
*/
public void setM(HashMapToInt<T> map, AMapToData m, int i) {
m.set(i, map.getI(getInternal(i)) - 1);
}
	/**
	 * Set the index i in the map given based on the mapping provided. The map should be guaranteed to contain all unique
	 * values except null. Therefore in case of null we set the provided si value.
	 *
	 * @param map A map containing all unique values of this array
	 * @param si  The default value to use in m if this Array contains null at index i
	 * @param m   The MapToData to set the value part of the Map from
	 * @param i   The index to set in m
	 */
	public void setM(HashMapToInt<T> map, int si, AMapToData m, int i) {
		final T v = getInternal(i);
		if(v != null)
			// recode map IDs are 1-based while AMapToData positions are 0-based, hence the - 1 shift.
			m.set(i, map.getI(v) - 1);
		else
			m.set(i, si);
	}
}
|
google/exposure-notifications-android | 35,373 | app/src/main/java/com/google/android/apps/exposurenotification/storage/ExposureNotificationSharedPreferences.java | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.android.apps.exposurenotification.storage;
import android.content.Context;
import android.content.SharedPreferences;
import androidx.annotation.AnyThread;
import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.Transformations;
import com.google.android.apps.exposurenotification.common.BooleanSharedPreferenceLiveData;
import com.google.android.apps.exposurenotification.common.ContainsSharedPreferenceLiveData;
import com.google.android.apps.exposurenotification.common.SharedPreferenceLiveData;
import com.google.android.apps.exposurenotification.common.logging.Logger;
import com.google.android.apps.exposurenotification.common.time.Clock;
import com.google.android.apps.exposurenotification.home.ExposureNotificationViewModel.ExposureNotificationState;
import com.google.android.apps.exposurenotification.riskcalculation.ExposureClassification;
import com.google.android.apps.exposurenotification.storage.DiagnosisEntity.TestResult;
import com.google.common.base.Optional;
import java.security.SecureRandom;
import java.util.Calendar;
import org.threeten.bp.Duration;
import org.threeten.bp.Instant;
/**
* Key value storage for ExposureNotification.
*
* <p>Partners should implement a daily TTL/expiry, for on-device storage of this data, and must
* ensure compliance with all applicable laws and requirements with respect to encryption, storage,
* and retention polices for end user data.
*/
public class ExposureNotificationSharedPreferences {

  private static final Logger logger = Logger.getLogger("Preferences");

  // ---------------------------------------------------------------------------------------------
  // SharedPreferences key constants. The key VALUES must never change once shipped, or previously
  // stored user data becomes unreadable; several values intentionally differ from their constant
  // names for exactly that backward-compatibility reason.
  // ---------------------------------------------------------------------------------------------
  private static final String SHARED_PREFERENCES_FILE =
      "ExposureNotificationSharedPreferences.SHARED_PREFERENCES_FILE";
  private static final String ONBOARDING_STATE_KEY =
      "ExposureNotificationSharedPreferences.ONBOARDING_STATE_KEY";
  private static final String SHARE_ANALYTICS_KEY =
      "ExposureNotificationSharedPreferences.SHARE_ANALYTICS_KEY";
  private static final String IS_ENABLED_CACHE_KEY =
      "ExposureNotificationSharedPreferences.IS_ENABLED_CACHE_KEY";
  private static final String EN_STATE_CACHE_KEY =
      "ExposureNotificationSharedPreferences.EN_STATE_CACHE_KEY";
  private static final String ATTENUATION_THRESHOLD_1_KEY =
      "ExposureNotificationSharedPreferences.ATTENUATION_THRESHOLD_1_KEY";
  private static final String ATTENUATION_THRESHOLD_2_KEY =
      "ExposureNotificationSharedPreferences.ATTENUATION_THRESHOLD_2_KEY";
  private static final String EXPOSURE_CLASSIFICATION_INDEX_KEY =
      "ExposureNotificationSharedPreferences.EXPOSURE_CLASSIFICATION_INDEX_KEY";
  private static final String EXPOSURE_CLASSIFICATION_NAME_KEY =
      "ExposureNotificationSharedPreferences.EXPOSURE_CLASSIFICATION_NAME_KEY";
  private static final String EXPOSURE_CLASSIFICATION_DATE_KEY =
      "ExposureNotificationSharedPreferences.EXPOSURE_CLASSIFICATION_DATE_KEY";
  private static final String EXPOSURE_CLASSIFICATION_IS_REVOKED_KEY =
      "ExposureNotificationSharedPreferences.EXPOSURE_CLASSIFICATION_IS_REVOKED_KEY";
  private static final String EXPOSURE_CLASSIFICATION_IS_CLASSIFICATION_NEW_KEY =
      "ExposureNotificationSharedPreferences.EXPOSURE_CLASSIFICATION_IS_CLASSIFICATION_NEW_KEY";
  private static final String EXPOSURE_CLASSIFICATION_IS_DATE_NEW_KEY =
      "ExposureNotificationSharedPreferences.EXPOSURE_CLASSIFICATION_IS_DATE_NEW_KEY";
  private static final String ANALYTICS_LOGGING_LAST_TIMESTAMP =
      "ExposureNotificationSharedPreferences.ANALYTICS_LOGGING_LAST_TIMESTAMP";
  // Value keeps the historical "PROVIDE_..." spelling for data compatibility.
  private static final String PROVIDED_DIAGNOSIS_KEY_HEX_TO_LOG_KEY =
      "ExposureNotificationSharedPreferences.PROVIDE_DIAGNOSIS_KEY_TO_LOG_KEY";
  private static final String HAS_PENDING_RESTORE_NOTIFICATION =
      "ExposureNotificationSharedPreferences.HAS_PENDING_RESTORE_NOTIFICATION";
  private static final String BLE_LOC_OFF_NOTIFICATION_SEEN =
      "ExposureNotificationSharedPreferences.BLE_LOC_OFF_NOTIFICATION_SEEN";
  private static final String BEGIN_TIMESTAMP_BLE_LOC_OFF =
      "ExposureNotificationSharedPreferences.BEGIN_TIMESTAMP_BLE_LOC_OFF";
  private static final String IS_IN_APP_SMS_NOTICE_SEEN =
      "ExposureNotificationSharedPreferences.IS_IN_APP_SMS_NOTICE_SEEN";
  private static final String IS_PLAY_SMS_NOTICE_SEEN =
      "ExposureNotificationSharedPreferences.IS_PLAY_SMS_NOTICE_SEEN";
  private static final String HAS_DISPLAYED_ONBOARDING_FOR_MIGRATING_USERS =
      "ExposureNotificationSharedPreferences.HAS_DISPLAYED_ONBOARDING_FOR_MIGRATING_USERS";
  // Private analytics
  private static final String SHARE_PRIVATE_ANALYTICS_KEY =
      "ExposureNotificationSharedPreferences.SHARE_PRIVATE_ANALYTICS_KEY";
  // The constant value uses the old constant name so that data is not lost when updating the app.
  private static final String PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_DAILY =
      "ExposureNotificationSharedPreferences.PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME";
  private static final String PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_BIWEEKLY =
      "ExposureNotificationSharedPreferences.PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_BIWEEKLY";
  private static final String EXPOSURE_NOTIFICATION_LAST_SHOWN_TIME =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_LAST_SHOWN_TIME_KEY";
  private static final String EXPOSURE_NOTIFICATION_LAST_SHOWN_CLASSIFICATION =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_LAST_SHOWN_CLASSIFICATION_KEY";
  private static final String EXPOSURE_NOTIFICATION_LAST_INTERACTION_TIME =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_ACTIVE_INTERACTION_TIME_KEY";
  private static final String EXPOSURE_NOTIFICATION_LAST_INTERACTION_TYPE =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_ACTIVE_INTERACTION_TYPE_KEY";
  private static final String EXPOSURE_NOTIFICATION_LAST_INTERACTION_CLASSIFICATION =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_INTERACTION_CLASSIFICATION_KEY";
  private static final String PRIVATE_ANALYTICS_VERIFICATION_CODE_TIME =
      "ExposureNotificationSharedPreferences.PRIVATE_ANALYTICS_VERIFICATION_CODE_TIME";
  private static final String PRIVATE_ANALYTICS_SUBMITTED_KEYS_TIME =
      "ExposureNotificationSharedPreferences.PRIVATE_ANALYTICS_SUBMITTED_KEYS_TIME";
  private static final String PRIVATE_ANALYTICS_LAST_EXPOSURE_TIME =
      "ExposureNotificationSharedPreferences.PRIVATE_ANALYTICS_LAST_EXPOSURE_TIME";
  private static final String PRIVATE_ANALYTICS_LAST_REPORT_TYPE =
      "ExposureNotificationSharedPreferences.PRIVATE_ANALYTICS_LAST_REPORT_TYPE";
  private static final String EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS";
  private static final String EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS_RESPONSE_TIME_MS =
      "ExposureNotificationSharedPreferences.EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS_TIME_MS";
  private static final String BIWEEKLY_METRICS_UPLOAD_DAY =
      "ExposureNotificationSharedPreferences.BIWEEKLY_METRICS_UPLOAD_DAY";
  private static final String MIGRATION_RUN_OR_NOT_NEEDED =
      "ExposureNotificationSharedPreferences.MIGRATION_RUN_OR_NOT_NEEDED";

  private final SharedPreferences sharedPreferences;
  private final Clock clock;
  private final SecureRandom random;
  // volatile: written from setAnalyticsStateListener() (instance-synchronized, which does not
  // guard a static field across instances) and read from setAppAnalyticsState(), possibly on a
  // different thread. volatile guarantees the write is visible to readers.
  private static volatile AnalyticsStateListener analyticsStateListener;
  private final LiveData<Boolean> appAnalyticsStateLiveData;
  private final LiveData<Boolean> privateAnalyticsStateLiveData;
  private final LiveData<Boolean> isExposureClassificationRevokedLiveData;
  private final LiveData<Boolean> isOnboardingStateSetLiveData;
  private final LiveData<Boolean> isPrivateAnalyticsStateSetLiveData;
  private final LiveData<ExposureClassification> exposureClassificationLiveData;
  private final LiveData<BadgeStatus> isExposureClassificationNewLiveData;
  private final LiveData<BadgeStatus> isExposureClassificationDateNewLiveData;
  private final LiveData<String> providedDiagnosisKeyHexToLogLiveData;
  private final LiveData<Boolean> inAppSmsNoticeSeenLiveData;

  /**
   * Enum for onboarding status.
   */
  public enum OnboardingStatus {
    UNKNOWN(0),
    ONBOARDED(1),
    SKIPPED(2);

    private final int value;

    OnboardingStatus(int value) {
      this.value = value;
    }

    public int value() {
      return value;
    }

    public static OnboardingStatus fromValue(int value) {
      switch (value) {
        case 1:
          return ONBOARDED;
        case 2:
          return SKIPPED;
        default:
          return UNKNOWN;
      }
    }
  }

  /**
   * Enum for "new" badge status.
   */
  public enum BadgeStatus {
    NEW(0),
    SEEN(1),
    DISMISSED(2);

    private final int value;

    BadgeStatus(int value) {
      this.value = value;
    }

    public int value() {
      return value;
    }

    public static BadgeStatus fromValue(int value) {
      switch (value) {
        case 1:
          return SEEN;
        case 2:
          return DISMISSED;
        default:
          return NEW;
      }
    }
  }

  /**
   * Enum for Vaccination Status.
   */
  public enum VaccinationStatus {
    UNKNOWN(0),
    VACCINATED(1),
    NOT_VACCINATED(2);

    private final int value;

    VaccinationStatus(int value) {
      this.value = value;
    }

    public int value() {
      return value;
    }

    public static VaccinationStatus fromValue(int value) {
      switch (value) {
        case 1:
          return VACCINATED;
        case 2:
          return NOT_VACCINATED;
        default:
          return UNKNOWN;
      }
    }
  }

  /**
   * Enum for network handling.
   */
  public enum NetworkMode {
    // Uses live but test instances of the diagnosis verification, key upload and download servers.
    LIVE,
    // Bypasses diagnosis verification, key uploads and downloads; no actual network calls.
    // Useful to test other components of Exposure Notifications in isolation from the servers.
    DISABLED
  }

  /**
   * Enum for the user's interaction with an exposure notification.
   */
  public enum NotificationInteraction {
    UNKNOWN(0),
    CLICKED(1),
    DISMISSED(2);

    private final int value;

    NotificationInteraction(int value) {
      this.value = value;
    }

    public int value() {
      return value;
    }

    public static NotificationInteraction fromValue(int value) {
      switch (value) {
        case 1:
          return CLICKED;
        case 2:
          return DISMISSED;
        default:
          return UNKNOWN;
      }
    }
  }

  ExposureNotificationSharedPreferences(Context context, Clock clock, SecureRandom random) {
    // These shared preferences are stored in {@value Context#MODE_PRIVATE} to be made only
    // accessible by the app.
    sharedPreferences = context.getSharedPreferences(SHARED_PREFERENCES_FILE, Context.MODE_PRIVATE);
    this.clock = clock;
    this.random = random;
    this.appAnalyticsStateLiveData =
        new BooleanSharedPreferenceLiveData(sharedPreferences, SHARE_ANALYTICS_KEY, false);
    this.privateAnalyticsStateLiveData =
        new BooleanSharedPreferenceLiveData(sharedPreferences, SHARE_PRIVATE_ANALYTICS_KEY, false);
    this.isExposureClassificationRevokedLiveData =
        new BooleanSharedPreferenceLiveData(
            sharedPreferences, EXPOSURE_CLASSIFICATION_IS_REVOKED_KEY, false);
    this.isOnboardingStateSetLiveData =
        new ContainsSharedPreferenceLiveData(sharedPreferences, ONBOARDING_STATE_KEY);
    this.isPrivateAnalyticsStateSetLiveData =
        new ContainsSharedPreferenceLiveData(sharedPreferences, SHARE_PRIVATE_ANALYTICS_KEY);
    this.inAppSmsNoticeSeenLiveData = new BooleanSharedPreferenceLiveData(
        sharedPreferences, IS_IN_APP_SMS_NOTICE_SEEN, false);
    this.exposureClassificationLiveData =
        new SharedPreferenceLiveData<ExposureClassification>(
            this.sharedPreferences,
            EXPOSURE_CLASSIFICATION_INDEX_KEY,
            EXPOSURE_CLASSIFICATION_NAME_KEY,
            EXPOSURE_CLASSIFICATION_DATE_KEY) {
          @Override
          protected void updateValue() {
            setValue(getExposureClassification());
          }
        };
    this.isExposureClassificationNewLiveData = new SharedPreferenceLiveData<BadgeStatus>(
        this.sharedPreferences,
        EXPOSURE_CLASSIFICATION_IS_CLASSIFICATION_NEW_KEY) {
      @Override
      protected void updateValue() {
        setValue(getIsExposureClassificationNew());
      }
    };
    this.isExposureClassificationDateNewLiveData = new SharedPreferenceLiveData<BadgeStatus>(
        this.sharedPreferences,
        EXPOSURE_CLASSIFICATION_IS_DATE_NEW_KEY) {
      @Override
      protected void updateValue() {
        setValue(getIsExposureClassificationDateNew());
      }
    };
    this.providedDiagnosisKeyHexToLogLiveData = new SharedPreferenceLiveData<String>(
        this.sharedPreferences,
        PROVIDED_DIAGNOSIS_KEY_HEX_TO_LOG_KEY) {
      @Override
      protected void updateValue() {
        setValue(getProvidedDiagnosisKeyHexToLog());
      }
    };
  }

  public void setHasPendingRestoreNotificationState(boolean enabled) {
    sharedPreferences.edit().putBoolean(HAS_PENDING_RESTORE_NOTIFICATION, enabled).commit();
  }

  public boolean hasPendingRestoreNotification() {
    return sharedPreferences.getBoolean(HAS_PENDING_RESTORE_NOTIFICATION, false);
  }

  public void removeHasPendingRestoreNotificationState() {
    sharedPreferences.edit().remove(HAS_PENDING_RESTORE_NOTIFICATION).commit();
  }

  public void setOnboardedState(boolean onboardedState) {
    sharedPreferences
        .edit()
        .putInt(
            ONBOARDING_STATE_KEY,
            onboardedState ? OnboardingStatus.ONBOARDED.value() : OnboardingStatus.SKIPPED.value())
        .apply();
  }

  public OnboardingStatus getOnboardedState() {
    return OnboardingStatus.fromValue(sharedPreferences.getInt(ONBOARDING_STATE_KEY, 0));
  }

  public LiveData<Boolean> isOnboardingStateSetLiveData() {
    return Transformations.distinctUntilChanged(isOnboardingStateSetLiveData);
  }

  public LiveData<Boolean> getAppAnalyticsStateLiveData() {
    return appAnalyticsStateLiveData;
  }

  /**
   * Persists the app-analytics opt-in state and notifies the registered listener, if any.
   * Uses a synchronous commit so the value is durable before the listener fires.
   */
  public void setAppAnalyticsState(boolean isEnabled) {
    sharedPreferences.edit().putBoolean(SHARE_ANALYTICS_KEY, isEnabled).commit();
    // Snapshot the listener once: it may be cleared concurrently between a null check and the
    // call, and reading a static volatile into a local avoids that check-then-act race.
    AnalyticsStateListener listener = analyticsStateListener;
    if (listener != null) {
      listener.onChanged(isEnabled);
    }
  }

  public synchronized void setAnalyticsStateListener(AnalyticsStateListener listener) {
    analyticsStateListener = listener;
  }

  public boolean getAppAnalyticsState() {
    return sharedPreferences.getBoolean(SHARE_ANALYTICS_KEY, false);
  }

  public Optional<Instant> maybeGetAnalyticsLoggingLastTimestamp() {
    if (!sharedPreferences.contains(ANALYTICS_LOGGING_LAST_TIMESTAMP)) {
      return Optional.absent();
    }
    return Optional.of(
        Instant.ofEpochMilli(sharedPreferences.getLong(ANALYTICS_LOGGING_LAST_TIMESTAMP, 0L)));
  }

  public void resetAnalyticsLoggingLastTimestamp() {
    sharedPreferences.edit().putLong(ANALYTICS_LOGGING_LAST_TIMESTAMP, clock.now().toEpochMilli())
        .commit();
  }

  public void clearAnalyticsLoggingLastTimestamp() {
    sharedPreferences.edit().remove(ANALYTICS_LOGGING_LAST_TIMESTAMP).commit();
  }

  public boolean isAppAnalyticsSet() {
    return sharedPreferences.contains(SHARE_ANALYTICS_KEY);
  }

  public LiveData<Boolean> getPrivateAnalyticsStateLiveData() {
    return privateAnalyticsStateLiveData;
  }

  public boolean getPrivateAnalyticState() {
    return sharedPreferences.getBoolean(SHARE_PRIVATE_ANALYTICS_KEY, false);
  }

  public void setPrivateAnalyticsState(boolean isEnabled) {
    logger.d("PrivateAnalyticsState changed, isEnabled= " + isEnabled);
    sharedPreferences.edit().putBoolean(SHARE_PRIVATE_ANALYTICS_KEY, isEnabled).commit();
  }

  public LiveData<Boolean> isPrivateAnalyticsStateSetLiveData() {
    return Transformations.distinctUntilChanged(isPrivateAnalyticsStateSetLiveData);
  }

  public boolean isPrivateAnalyticsStateSet() {
    return sharedPreferences.contains(SHARE_PRIVATE_ANALYTICS_KEY);
  }

  public int getAttenuationThreshold1(int defaultThreshold) {
    return sharedPreferences.getInt(ATTENUATION_THRESHOLD_1_KEY, defaultThreshold);
  }

  public void setAttenuationThreshold1(int threshold) {
    sharedPreferences.edit().putInt(ATTENUATION_THRESHOLD_1_KEY, threshold).commit();
  }

  public int getAttenuationThreshold2(int defaultThreshold) {
    return sharedPreferences.getInt(ATTENUATION_THRESHOLD_2_KEY, defaultThreshold);
  }

  public boolean getIsEnabledCache() {
    return sharedPreferences.getBoolean(IS_ENABLED_CACHE_KEY, false);
  }

  public void setIsEnabledCache(boolean isEnabled) {
    sharedPreferences.edit().putBoolean(IS_ENABLED_CACHE_KEY, isEnabled).apply();
  }

  public int getEnStateCache() {
    return sharedPreferences.getInt(
        EN_STATE_CACHE_KEY, ExposureNotificationState.DISABLED.ordinal());
  }

  public void setEnStateCache(int enState) {
    sharedPreferences.edit().putInt(EN_STATE_CACHE_KEY, enState).apply();
  }

  public void setExposureClassification(ExposureClassification exposureClassification) {
    sharedPreferences
        .edit()
        .putInt(
            EXPOSURE_CLASSIFICATION_INDEX_KEY,
            exposureClassification.getClassificationIndex())
        .putString(
            EXPOSURE_CLASSIFICATION_NAME_KEY,
            exposureClassification.getClassificationName()
        )
        .putLong(
            EXPOSURE_CLASSIFICATION_DATE_KEY,
            exposureClassification.getClassificationDate()
        )
        .commit();
  }

  public void deleteExposureInformation() {
    sharedPreferences.edit()
        .remove(EXPOSURE_CLASSIFICATION_INDEX_KEY)
        .remove(EXPOSURE_CLASSIFICATION_NAME_KEY)
        .remove(EXPOSURE_CLASSIFICATION_DATE_KEY)
        .remove(EXPOSURE_CLASSIFICATION_IS_REVOKED_KEY)
        .remove(EXPOSURE_CLASSIFICATION_IS_CLASSIFICATION_NEW_KEY)
        .remove(EXPOSURE_CLASSIFICATION_IS_DATE_NEW_KEY)
        .commit();
  }

  public ExposureClassification getExposureClassification() {
    return ExposureClassification.create(
        sharedPreferences.getInt(EXPOSURE_CLASSIFICATION_INDEX_KEY,
            ExposureClassification.NO_EXPOSURE_CLASSIFICATION_INDEX),
        sharedPreferences.getString(EXPOSURE_CLASSIFICATION_NAME_KEY,
            ExposureClassification.NO_EXPOSURE_CLASSIFICATION_NAME),
        sharedPreferences.getLong(EXPOSURE_CLASSIFICATION_DATE_KEY,
            ExposureClassification.NO_EXPOSURE_CLASSIFICATION_DATE));
  }

  public LiveData<ExposureClassification> getExposureClassificationLiveData() {
    return exposureClassificationLiveData;
  }

  public void setIsExposureClassificationRevoked(boolean isRevoked) {
    sharedPreferences.edit().putBoolean(EXPOSURE_CLASSIFICATION_IS_REVOKED_KEY, isRevoked).commit();
  }

  public boolean getIsExposureClassificationRevoked() {
    return sharedPreferences.getBoolean(EXPOSURE_CLASSIFICATION_IS_REVOKED_KEY, false);
  }

  public LiveData<Boolean> getIsExposureClassificationRevokedLiveData() {
    return isExposureClassificationRevokedLiveData;
  }

  public void setIsExposureClassificationNewAsync(BadgeStatus badgeStatus) {
    sharedPreferences.edit()
        .putInt(EXPOSURE_CLASSIFICATION_IS_CLASSIFICATION_NEW_KEY, badgeStatus.value()).apply();
  }

  // Vaccine Status for Private Analytics. Only recorded while the user has private analytics
  // enabled.
  public void setLastVaccinationResponse(Instant responseTime,
      VaccinationStatus vaccinationStatus) {
    if (getPrivateAnalyticState()) {
      sharedPreferences.edit()
          .putInt(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS, vaccinationStatus.value())
          .putLong(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS_RESPONSE_TIME_MS,
              responseTime.toEpochMilli())
          .apply();
    }
  }

  public VaccinationStatus getLastVaccinationStatus() {
    return VaccinationStatus
        .fromValue(sharedPreferences.getInt(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS,
            VaccinationStatus.UNKNOWN.value()));
  }

  public Instant getLastVaccinationStatusResponseTime() {
    return Instant
        .ofEpochMilli(sharedPreferences.getLong(
            EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS_RESPONSE_TIME_MS, 0L));
  }

  /*
   * If bleLocNotificationSeen is set to true, we clear BEGIN_TIMESTAMP_BLE_LOC_OFF, too,
   * since it is not required anymore.
   */
  public void setBleLocNotificationSeen(boolean bleLocNotificationSeen) {
    sharedPreferences.edit()
        .putBoolean(BLE_LOC_OFF_NOTIFICATION_SEEN, bleLocNotificationSeen).apply();
    if (bleLocNotificationSeen) {
      sharedPreferences.edit().remove(BEGIN_TIMESTAMP_BLE_LOC_OFF).apply();
    }
  }

  public boolean getBleLocNotificationSeen() {
    return sharedPreferences.getBoolean(BLE_LOC_OFF_NOTIFICATION_SEEN, false);
  }

  /*
   * Sets a timestamp for the first time we see Ble/Location off.
   * If beginTimestampBleLocOff is Optional.absent(), any previously stored value is cleared.
   */
  public void setBeginTimestampBleLocOff(Optional<Instant> beginTimestampBleLocOff) {
    if (beginTimestampBleLocOff.isPresent()) {
      sharedPreferences.edit()
          .putLong(BEGIN_TIMESTAMP_BLE_LOC_OFF, beginTimestampBleLocOff.get().toEpochMilli())
          .apply();
    } else {
      sharedPreferences.edit().remove(BEGIN_TIMESTAMP_BLE_LOC_OFF).apply();
    }
  }

  public Optional<Instant> getBeginTimestampBleLocOff() {
    // -1 is the "not set" sentinel; a real timestamp is never negative here.
    long longBeginTimestampBleLocOff = sharedPreferences.getLong(
        BEGIN_TIMESTAMP_BLE_LOC_OFF, -1L);
    return longBeginTimestampBleLocOff != -1
        ? Optional.of(Instant.ofEpochMilli(longBeginTimestampBleLocOff))
        : Optional.absent();
  }

  // Notifications for Private Analytics. Only recorded while private analytics is enabled and the
  // classification index is a real exposure (> 0).
  public void setExposureNotificationLastShownClassification(Instant exposureNotificationTime,
      ExposureClassification exposureClassification) {
    if (getPrivateAnalyticState() && exposureClassification.getClassificationIndex() > 0) {
      sharedPreferences.edit()
          .putInt(EXPOSURE_NOTIFICATION_LAST_SHOWN_CLASSIFICATION,
              exposureClassification.getClassificationIndex())
          .putLong(EXPOSURE_NOTIFICATION_LAST_SHOWN_TIME, exposureNotificationTime.toEpochMilli())
          .putLong(PRIVATE_ANALYTICS_LAST_EXPOSURE_TIME,
              exposureClassification.getClassificationDate())
          .apply();
    }
  }

  public int getExposureNotificationLastShownClassification() {
    return sharedPreferences.getInt(EXPOSURE_NOTIFICATION_LAST_SHOWN_CLASSIFICATION,
        ExposureClassification.NO_EXPOSURE_CLASSIFICATION_INDEX);
  }

  public Instant getExposureNotificationLastShownTime() {
    return Instant
        .ofEpochMilli(sharedPreferences.getLong(EXPOSURE_NOTIFICATION_LAST_SHOWN_TIME, 0L));
  }

  public Instant getPrivateAnalyticsLastExposureTime() {
    // The date of exposure is stored at a day granularity
    return Instant.EPOCH
        .plus(Duration.ofDays(sharedPreferences.getLong(PRIVATE_ANALYTICS_LAST_EXPOSURE_TIME, 0L)));
  }

  // Interaction for Private Analytics.
  public void setExposureNotificationLastInteraction(Instant exposureNotificationInteractionTime,
      NotificationInteraction interaction,
      int classificationIndex) {
    if (getPrivateAnalyticState() && classificationIndex > 0) {
      sharedPreferences.edit()
          .putLong(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TIME,
              exposureNotificationInteractionTime.toEpochMilli())
          .putInt(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TYPE, interaction.value())
          .putInt(EXPOSURE_NOTIFICATION_LAST_INTERACTION_CLASSIFICATION, classificationIndex)
          .apply();
    }
  }

  public Instant getExposureNotificationLastInteractionTime() {
    return Instant
        .ofEpochMilli(sharedPreferences.getLong(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TIME, 0));
  }

  public NotificationInteraction getExposureNotificationLastInteractionType() {
    return NotificationInteraction.fromValue(sharedPreferences.getInt(
        EXPOSURE_NOTIFICATION_LAST_INTERACTION_TYPE,
        NotificationInteraction.UNKNOWN.value()));
  }

  public int getExposureNotificationLastInteractionClassification() {
    return sharedPreferences.getInt(EXPOSURE_NOTIFICATION_LAST_INTERACTION_CLASSIFICATION,
        ExposureClassification.NO_EXPOSURE_CLASSIFICATION_INDEX);
  }

  // Verification code time for Private Analytics.
  public void setPrivateAnalyticsLastSubmittedCodeTime(Instant submittedCodeTime) {
    if (getPrivateAnalyticState()) {
      sharedPreferences.edit()
          .putLong(PRIVATE_ANALYTICS_VERIFICATION_CODE_TIME,
              submittedCodeTime.toEpochMilli()).apply();
    }
  }

  public Instant getPrivateAnalyticsLastSubmittedCodeTime() {
    return Instant
        .ofEpochMilli(sharedPreferences.getLong(PRIVATE_ANALYTICS_VERIFICATION_CODE_TIME, 0));
  }

  // Submitted keys time for Private Analytics.
  public void setPrivateAnalyticsLastSubmittedKeysTime(Instant submittedCodeTime) {
    if (getPrivateAnalyticState()) {
      sharedPreferences.edit()
          .putLong(PRIVATE_ANALYTICS_SUBMITTED_KEYS_TIME,
              submittedCodeTime.toEpochMilli()).apply();
    }
  }

  public Instant getPrivateAnalyticsLastSubmittedKeysTime() {
    return Instant
        .ofEpochMilli(sharedPreferences.getLong(PRIVATE_ANALYTICS_SUBMITTED_KEYS_TIME, 0));
  }

  // Last report type for Private Analytics. A null testResult is stored as -1 ("none").
  public void setPrivateAnalyticsLastReportType(@Nullable TestResult testResult) {
    if (getPrivateAnalyticState()) {
      int testResultOrdinal = testResult != null ? testResult.ordinal() : -1;
      sharedPreferences.edit().putInt(PRIVATE_ANALYTICS_LAST_REPORT_TYPE, testResultOrdinal)
          .apply();
    }
  }

  @Nullable
  public TestResult getPrivateAnalyticsLastReportType() {
    int testResultOrdinal = sharedPreferences.getInt(PRIVATE_ANALYTICS_LAST_REPORT_TYPE, -1);
    // Guard against the "none" sentinel and ordinals written by a build with a different enum.
    if (testResultOrdinal < 0 || testResultOrdinal >= TestResult.values().length) {
      return null;
    }
    return TestResult.values()[testResultOrdinal];
  }

  // Clear the Private Analytics fields.
  public void clearPrivateAnalyticsFields() {
    sharedPreferences.edit()
        .remove(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TIME)
        .remove(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TYPE)
        .remove(EXPOSURE_NOTIFICATION_LAST_INTERACTION_CLASSIFICATION)
        .remove(EXPOSURE_NOTIFICATION_LAST_SHOWN_TIME)
        .remove(EXPOSURE_NOTIFICATION_LAST_SHOWN_CLASSIFICATION)
        .remove(PRIVATE_ANALYTICS_LAST_EXPOSURE_TIME)
        .remove(PRIVATE_ANALYTICS_LAST_REPORT_TYPE)
        .remove(PRIVATE_ANALYTICS_VERIFICATION_CODE_TIME)
        .remove(PRIVATE_ANALYTICS_SUBMITTED_KEYS_TIME)
        .remove(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS)
        .remove(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS_RESPONSE_TIME_MS)
        .apply();
  }

  /** Removes daily private-analytics metrics whose recorded time is strictly before {@code date}. */
  public void clearPrivateAnalyticsDailyFieldsBefore(Instant date) {
    SharedPreferences.Editor sharedPreferencesEditor = sharedPreferences.edit();
    if (getExposureNotificationLastShownTime().isBefore(date)) {
      sharedPreferencesEditor.remove(PRIVATE_ANALYTICS_LAST_EXPOSURE_TIME);
    }
    if (getExposureNotificationLastInteractionTime().isBefore(date)) {
      sharedPreferencesEditor.remove(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TIME)
          .remove(EXPOSURE_NOTIFICATION_LAST_INTERACTION_TYPE)
          .remove(EXPOSURE_NOTIFICATION_LAST_INTERACTION_CLASSIFICATION);
    }
    sharedPreferencesEditor.apply();
  }

  /** Removes biweekly private-analytics metrics whose recorded time is strictly before {@code date}. */
  public void clearPrivateAnalyticsBiweeklyFieldsBefore(Instant date) {
    SharedPreferences.Editor sharedPreferencesEditor = sharedPreferences.edit();
    if (getExposureNotificationLastShownTime().isBefore(date)) {
      sharedPreferencesEditor.remove(EXPOSURE_NOTIFICATION_LAST_SHOWN_TIME)
          .remove(EXPOSURE_NOTIFICATION_LAST_SHOWN_CLASSIFICATION);
    }
    if (getPrivateAnalyticsLastSubmittedCodeTime().isBefore(date)) {
      sharedPreferencesEditor.remove(PRIVATE_ANALYTICS_VERIFICATION_CODE_TIME)
          .remove(PRIVATE_ANALYTICS_LAST_REPORT_TYPE);
    }
    if (getPrivateAnalyticsLastSubmittedKeysTime().isBefore(date)) {
      sharedPreferencesEditor.remove(PRIVATE_ANALYTICS_SUBMITTED_KEYS_TIME);
    }
    if (getLastVaccinationStatusResponseTime().isBefore(date)) {
      sharedPreferencesEditor
          .remove(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS_RESPONSE_TIME_MS);
      sharedPreferencesEditor.remove(EXPOSURE_NOTIFICATION_LAST_VACCINATION_STATUS);
    }
    sharedPreferencesEditor.apply();
  }

  /**
   * Records the last time the private analytics worker ran all daily metrics.
   * <p>
   * NB: The existence of this value only means the private analytics are enabled in the
   * configuration, and does not indicate whether the user enabled or disabled private analytics. It
   * only captures when the worker has been running last (it aborts early when the user opted out of
   * private analytics).
   */
  public void setPrivateAnalyticsWorkerLastTimeForDaily(Instant privateAnalyticsWorkerTime) {
    if (getPrivateAnalyticState()) {
      sharedPreferences.edit().putLong(PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_DAILY,
          privateAnalyticsWorkerTime.toEpochMilli()).apply();
    }
  }

  /**
   * Records the last time the private analytics worker ran all biweekly metrics.
   * <p>
   * NB: The existence of this value only means the private analytics are enabled in the
   * configuration, and does not indicate whether the user enabled or disabled private analytics. It
   * only captures when the worker has been running last (it aborts early when the user opted out of
   * private analytics).
   */
  public void setPrivateAnalyticsWorkerLastTimeForBiweekly(Instant privateAnalyticsWorkerTime) {
    if (getPrivateAnalyticState()) {
      sharedPreferences.edit().putLong(PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_BIWEEKLY,
          privateAnalyticsWorkerTime.toEpochMilli()).apply();
    }
  }

  public Instant getPrivateAnalyticsWorkerLastTimeForDaily() {
    return Instant
        .ofEpochMilli(
            sharedPreferences.getLong(PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_DAILY, 0));
  }

  public Instant getPrivateAnalyticsWorkerLastTimeForBiweekly() {
    return Instant
        .ofEpochMilli(
            sharedPreferences.getLong(PRIVATE_ANALYTICS_LAST_WORKER_RUN_TIME_FOR_BIWEEKLY, 0));
  }

  public BadgeStatus getIsExposureClassificationNew() {
    return BadgeStatus.fromValue(
        sharedPreferences
            .getInt(EXPOSURE_CLASSIFICATION_IS_CLASSIFICATION_NEW_KEY, BadgeStatus.NEW.value()));
  }

  public LiveData<BadgeStatus> getIsExposureClassificationNewLiveData() {
    return isExposureClassificationNewLiveData;
  }

  public void setIsExposureClassificationDateNewAsync(BadgeStatus badgeStatus) {
    sharedPreferences.edit()
        .putInt(EXPOSURE_CLASSIFICATION_IS_DATE_NEW_KEY, badgeStatus.value()).apply();
  }

  public BadgeStatus getIsExposureClassificationDateNew() {
    return BadgeStatus.fromValue(
        sharedPreferences
            .getInt(EXPOSURE_CLASSIFICATION_IS_DATE_NEW_KEY, BadgeStatus.NEW.value()));
  }

  public LiveData<BadgeStatus> getIsExposureClassificationDateNewLiveData() {
    return isExposureClassificationDateNewLiveData;
  }

  public void setProvidedDiagnosisKeyHexToLog(String keyHex) {
    sharedPreferences.edit()
        .putString(PROVIDED_DIAGNOSIS_KEY_HEX_TO_LOG_KEY, keyHex)
        .commit();
  }

  public String getProvidedDiagnosisKeyHexToLog() {
    return sharedPreferences.getString(PROVIDED_DIAGNOSIS_KEY_HEX_TO_LOG_KEY, "");
  }

  public LiveData<String> getProvidedDiagnosisKeyHexToLogLiveData() {
    return providedDiagnosisKeyHexToLogLiveData;
  }

  @AnyThread
  public void markInAppSmsNoticeSeenAsync() {
    sharedPreferences.edit().putBoolean(IS_IN_APP_SMS_NOTICE_SEEN, true).apply();
  }

  @WorkerThread
  public void markInAppSmsNoticeSeen() {
    sharedPreferences.edit().putBoolean(IS_IN_APP_SMS_NOTICE_SEEN, true)
        .commit();
  }

  public boolean isInAppSmsNoticeSeen() {
    return sharedPreferences.getBoolean(IS_IN_APP_SMS_NOTICE_SEEN, false);
  }

  public LiveData<Boolean> isInAppSmsNoticeSeenLiveData() {
    return inAppSmsNoticeSeenLiveData;
  }

  @AnyThread
  public void setPlaySmsNoticeSeenAsync(boolean isSeen) {
    sharedPreferences.edit().putBoolean(IS_PLAY_SMS_NOTICE_SEEN, isSeen).apply();
  }

  @WorkerThread
  public void setPlaySmsNoticeSeen(boolean isSeen) {
    sharedPreferences.edit().putBoolean(IS_PLAY_SMS_NOTICE_SEEN, isSeen)
        .commit();
  }

  public boolean isPlaySmsNoticeSeen() {
    return sharedPreferences.getBoolean(IS_PLAY_SMS_NOTICE_SEEN, false);
  }

  public void setBiweeklyMetricsUploadDay(Calendar calendar) {
    // We want the SharedPreferences field to match:
    // field % 7 + 1 == calendar.day_of_week
    // and field % 2 == calendar.week_of_year % 2
    int weekDayIndex = calendar.get(Calendar.DAY_OF_WEEK) - 1;
    int weekNumberParity = calendar.get(Calendar.WEEK_OF_YEAR) % 2;
    sharedPreferences.edit().putInt(BIWEEKLY_METRICS_UPLOAD_DAY,
        weekDayIndex + weekNumberParity * 7).apply();
  }

  public int getBiweeklyMetricsUploadDay() {
    // we want to upload some metrics biweekly, but don't want everyone to upload on the same day
    // (for example, when the new code gets rolled out), so instead we pick and memorize a random
    // fortnightly day.
    if (!sharedPreferences.contains(BIWEEKLY_METRICS_UPLOAD_DAY)) {
      // Pick a value between 0 and 13 included
      // Day of week will be (value % 7 + 1) (Calendar DAY_OF_WEEK has values between 1 and 7)
      // and we only upload if current week number % 2 == value / 7
      int randomDay = random.nextInt(14);
      sharedPreferences.edit().putInt(BIWEEKLY_METRICS_UPLOAD_DAY, randomDay).commit();
    }
    return sharedPreferences.getInt(BIWEEKLY_METRICS_UPLOAD_DAY, 0);
  }

  public boolean isMigrationRunOrNotNeeded() {
    return sharedPreferences.getBoolean(MIGRATION_RUN_OR_NOT_NEEDED, false);
  }

  public void markMigrationAsRunOrNotNeeded() {
    sharedPreferences.edit().putBoolean(MIGRATION_RUN_OR_NOT_NEEDED, true).apply();
  }

  @AnyThread
  public void markMigratingUserAsOnboardedAsync() {
    sharedPreferences.edit().putBoolean(HAS_DISPLAYED_ONBOARDING_FOR_MIGRATING_USERS, true).apply();
  }

  public boolean isMigratingUserOnboarded() {
    return sharedPreferences.getBoolean(HAS_DISPLAYED_ONBOARDING_FOR_MIGRATING_USERS, false);
  }

  /** Callback fired (synchronously, from the setter's calling thread) when the app-analytics
   * opt-in state changes via {@link #setAppAnalyticsState(boolean)}. */
  public interface AnalyticsStateListener {

    void onChanged(boolean analyticsEnabled);
  }
}
|
apache/tajo | 35,557 | tajo-plan/src/main/java/org/apache/tajo/plan/serder/LogicalNodeSerializer.java | /*
 * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.plan.serder;
import com.google.common.collect.Maps;
import org.apache.hadoop.fs.Path;
import org.apache.tajo.algebra.JoinType;
import org.apache.tajo.catalog.SortSpec;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.common.ProtoObject;
import org.apache.tajo.exception.TajoException;
import org.apache.tajo.exception.TajoInternalError;
import org.apache.tajo.exception.NotImplementedException;
import org.apache.tajo.exception.TajoRuntimeException;
import org.apache.tajo.plan.LogicalPlan;
import org.apache.tajo.plan.Target;
import org.apache.tajo.plan.expr.EvalNode;
import org.apache.tajo.plan.logical.*;
import org.apache.tajo.plan.rewrite.rules.IndexScanInfo.SimplePredicate;
import org.apache.tajo.plan.serder.PlanProto.AlterTableNode.AddColumn;
import org.apache.tajo.plan.serder.PlanProto.AlterTableNode.RenameColumn;
import org.apache.tajo.plan.serder.PlanProto.AlterTableNode.RenameTable;
import org.apache.tajo.plan.serder.PlanProto.AlterTablespaceNode.SetLocation;
import org.apache.tajo.plan.serder.PlanProto.LogicalNodeTree;
import org.apache.tajo.plan.visitor.BasicLogicalPlanVisitor;
import org.apache.tajo.util.ProtoUtil;
import java.util.*;
/**
 * It serializes a logical plan into protobuf-based serialized bytes.
*
* In detail, it traverses all logical nodes in a postfix order.
* For each visiting node, it serializes the node and adds the serialized bytes into a list.
 * Then, the list will contain the serialized nodes in a postfix order.
*
* @see org.apache.tajo.plan.serder.LogicalNodeDeserializer
*/
public class LogicalNodeSerializer extends BasicLogicalPlanVisitor<LogicalNodeSerializer.SerializeContext,
    LogicalNode> {
  // Stateless visitor: a single shared instance is safe because all per-serialization
  // state lives in the SerializeContext passed through each visit.
  private static final LogicalNodeSerializer instance;
  static {
    instance = new LogicalNodeSerializer();
  }
  /**
   * Serialize a logical plan into protobuf-based serialized bytes.
   *
   * @param node LogicalNode to be serialized
   * @return the serialized node tree, collected in postfix visiting order
   */
  public static LogicalNodeTree serialize(LogicalNode node) {
    SerializeContext context = new SerializeContext();
    try {
      instance.visit(context, null, null, node, new Stack<>());
    } catch (TajoException e) {
      // Serializing an already-built plan is not expected to fail; surface it as an internal error.
      throw new TajoInternalError(e);
    }
    return context.treeBuilder.build();
  }
  /**
   * Builds the part common to every serialized node: visit sequence, node id (PID),
   * node type, and the in/out schemas when present. Registers the node's PID in the
   * context id map if it has not been seen yet.
   */
  private static PlanProto.LogicalNode.Builder createNodeBuilder(SerializeContext context, LogicalNode node) {
    int selfId;
    if (context.idMap.containsKey(node.getPID())) {
      selfId = context.idMap.get(node.getPID());
    } else {
      selfId = context.seqId++;
      context.idMap.put(node.getPID(), selfId);
    }
    PlanProto.LogicalNode.Builder nodeBuilder = PlanProto.LogicalNode.newBuilder();
    nodeBuilder.setVisitSeq(selfId);
    nodeBuilder.setNodeId(node.getPID());
    nodeBuilder.setType(convertType(node.getType()));
    // some DDL statements like DropTable or DropDatabase do not have in/out schemas
    if (node.getInSchema() != null) {
      nodeBuilder.setInSchema(node.getInSchema().getProto());
    }
    if (node.getOutSchema() != null) {
      nodeBuilder.setOutSchema(node.getOutSchema().getProto());
    }
    return nodeBuilder;
  }
  /**
   * Per-serialization state: the visit-sequence counter, the PID-to-sequence map,
   * and the builder accumulating the serialized node tree.
   */
  public static class SerializeContext {
    private int seqId = 0;
    private Map<Integer, Integer> idMap = Maps.newHashMap(); // map for PID and visit sequence
    private LogicalNodeTree.Builder treeBuilder = LogicalNodeTree.newBuilder();
  }
  /** Serializes a {@link LogicalRootNode} (children first, via super) and appends it to the tree. */
  @Override
  public LogicalNode visitRoot(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                               LogicalRootNode root, Stack<LogicalNode> stack) throws TajoException {
    super.visitRoot(context, plan, block, root, stack);
    int [] childIds = registerGetChildIds(context, root);
    PlanProto.RootNode.Builder rootBuilder = PlanProto.RootNode.newBuilder();
    rootBuilder.setChildSeq(childIds[0]);
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, root);
    nodeBuilder.setRoot(rootBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return root;
  }
  /** Serializes a {@link SetSessionNode}; the session value is optional. */
  @Override
  public LogicalNode visitSetSession(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                     SetSessionNode node, Stack<LogicalNode> stack) throws TajoException {
    super.visitSetSession(context, plan, block, node, stack);
    PlanProto.SetSessionNode.Builder builder = PlanProto.SetSessionNode.newBuilder();
    builder.setName(node.getName());
    if (node.hasValue()) {
      builder.setValue(node.getValue());
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setSetSession(builder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes an {@link EvalExprNode}; a leaf node, so the parent visitor is not invoked. */
  @Override
  public LogicalNode visitEvalExpr(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                   EvalExprNode exprEval, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.EvalExprNode.Builder exprEvalBuilder = PlanProto.EvalExprNode.newBuilder();
    exprEvalBuilder.addAllTargets(
        ProtoUtil.<PlanProto.Target>toProtoObjects(exprEval.getTargets().toArray(new ProtoObject[exprEval.getTargets().size()])));
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, exprEval);
    nodeBuilder.setExprEval(exprEvalBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return exprEval;
  }
  /** Serializes a {@link ProjectionNode} with its targets and distinct flag. */
  @Override
  public LogicalNode visitProjection(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                     ProjectionNode projection, Stack<LogicalNode> stack) throws TajoException {
    super.visitProjection(context, plan, block, projection, stack);
    int [] childIds = registerGetChildIds(context, projection);
    PlanProto.ProjectionNode.Builder projectionBuilder = PlanProto.ProjectionNode.newBuilder();
    projectionBuilder.setChildSeq(childIds[0]);
    projectionBuilder.addAllTargets(
        ProtoUtil.<PlanProto.Target>toProtoObjects(projection.getTargets().toArray(new ProtoObject[projection.getTargets().size()])));
    projectionBuilder.setDistinct(projection.isDistinct());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, projection);
    nodeBuilder.setProjection(projectionBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return projection;
  }
  /** Serializes a {@link LimitNode} with its fetch-first row count. */
  @Override
  public LogicalNode visitLimit(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                LimitNode limit, Stack<LogicalNode> stack) throws TajoException {
    super.visitLimit(context, plan, block, limit, stack);
    int [] childIds = registerGetChildIds(context, limit);
    PlanProto.LimitNode.Builder limitBuilder = PlanProto.LimitNode.newBuilder();
    limitBuilder.setChildSeq(childIds[0]);
    limitBuilder.setFetchFirstNum(limit.getFetchFirstNum());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, limit);
    nodeBuilder.setLimit(limitBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return limit;
  }
  /**
   * Serializes a {@link WindowAggNode}. Partition keys, window functions, sort specs,
   * and targets are each serialized only when present on the node.
   */
  @Override
  public LogicalNode visitWindowAgg(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                    WindowAggNode windowAgg, Stack<LogicalNode> stack) throws TajoException {
    super.visitWindowAgg(context, plan, block, windowAgg, stack);
    int [] childIds = registerGetChildIds(context, windowAgg);
    PlanProto.WindowAggNode.Builder windowAggBuilder = PlanProto.WindowAggNode.newBuilder();
    windowAggBuilder.setChildSeq(childIds[0]);
    if (windowAgg.hasPartitionKeys()) {
      windowAggBuilder.addAllPartitionKeys(
          ProtoUtil.<CatalogProtos.ColumnProto>toProtoObjects(windowAgg.getPartitionKeys()));
    }
    if (windowAgg.hasAggFunctions()) {
      windowAggBuilder.addAllWindowFunctions(
          ProtoUtil.<PlanProto.EvalNodeTree>toProtoObjects(windowAgg.getWindowFunctions()));
    }
    windowAggBuilder.setDistinct(windowAgg.isDistinct());
    if (windowAgg.hasSortSpecs()) {
      windowAggBuilder.addAllSortSpecs(
          ProtoUtil.<CatalogProtos.SortSpecProto>toProtoObjects(windowAgg.getSortSpecs()));
    }
    if (windowAgg.hasTargets()) {
      windowAggBuilder.addAllTargets(
          ProtoUtil.<PlanProto.Target>toProtoObjects(windowAgg.getTargets().toArray(new ProtoObject[windowAgg.getTargets().size()])));
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, windowAgg);
    nodeBuilder.setWindowAgg(windowAggBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return windowAgg;
  }
  /** Serializes a {@link SortNode} with one proto sort spec per sort key. */
  @Override
  public LogicalNode visitSort(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                               SortNode sort, Stack<LogicalNode> stack) throws TajoException {
    super.visitSort(context, plan, block, sort, stack);
    int [] childIds = registerGetChildIds(context, sort);
    PlanProto.SortNode.Builder sortBuilder = PlanProto.SortNode.newBuilder();
    sortBuilder.setChildSeq(childIds[0]);
    for (int i = 0; i < sort.getSortKeys().length; i++) {
      sortBuilder.addSortSpecs(sort.getSortKeys()[i].getProto());
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, sort);
    nodeBuilder.setSort(sortBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return sort;
  }
  /**
   * Serializes a {@link HavingNode}. Note that HAVING reuses the generic
   * {@code FilterNode} proto message: only the qualification differs from a WHERE filter.
   */
  @Override
  public LogicalNode visitHaving(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                 HavingNode having, Stack<LogicalNode> stack) throws TajoException {
    super.visitHaving(context, plan, block, having, stack);
    int [] childIds = registerGetChildIds(context, having);
    PlanProto.FilterNode.Builder filterBuilder = PlanProto.FilterNode.newBuilder();
    filterBuilder.setChildSeq(childIds[0]);
    filterBuilder.setQual(EvalNodeSerializer.serialize(having.getQual()));
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, having);
    nodeBuilder.setFilter(filterBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return having;
  }
  /** Serializes a {@link GroupbyNode}; the proto construction itself is shared via {@link #buildGroupby}. */
  @Override
  public LogicalNode visitGroupBy(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                  GroupbyNode node, Stack<LogicalNode> stack) throws TajoException {
    // NOTE(review): a fresh Stack is passed to super instead of the caller's stack — confirm intentional.
    super.visitGroupBy(context, plan, block, node, new Stack<>());
    PlanProto.LogicalNode.Builder nodeBuilder = buildGroupby(context, node);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /**
   * Builds (but does not append) the serialized form of a {@link GroupbyNode}.
   * Shared between {@link #visitGroupBy} and {@link #visitDistinctGroupby}, which
   * embeds group-by sub-plans in its own message.
   */
  private PlanProto.LogicalNode.Builder buildGroupby(SerializeContext context, GroupbyNode node)
      throws TajoException {
    int [] childIds = registerGetChildIds(context, node);
    PlanProto.GroupbyNode.Builder groupbyBuilder = PlanProto.GroupbyNode.newBuilder();
    groupbyBuilder.setChildSeq(childIds[0]);
    groupbyBuilder.setDistinct(node.isDistinct());
    if (node.groupingKeyNum() > 0) {
      groupbyBuilder.addAllGroupingKeys(
          ProtoUtil.<CatalogProtos.ColumnProto>toProtoObjects(node.getGroupingColumns()));
    }
    if (node.hasAggFunctions()) {
      groupbyBuilder.addAllAggFunctions(
          ProtoUtil.<PlanProto.EvalNodeTree>toProtoObjects(node.getAggFunctions().toArray(new ProtoObject[node.getAggFunctions().size()])));
    }
    if (node.hasTargets()) {
      groupbyBuilder.addAllTargets(ProtoUtil.<PlanProto.Target>toProtoObjects(node.getTargets().toArray(new ProtoObject[node.getTargets().size()])));
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setGroupby(groupbyBuilder);
    return nodeBuilder;
  }
  /**
   * Serializes a {@link DistinctGroupbyNode}, embedding its optional group-by plan,
   * its sub-plans, grouping keys, aggregation functions, targets, and result column ids.
   */
  @Override
  public LogicalNode visitDistinctGroupby(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                          DistinctGroupbyNode node, Stack<LogicalNode> stack) throws TajoException {
    // NOTE(review): a fresh Stack is passed to super instead of the caller's stack — confirm intentional.
    super.visitDistinctGroupby(context, plan, block, node, new Stack<>());
    int [] childIds = registerGetChildIds(context, node);
    PlanProto.DistinctGroupbyNode.Builder distGroupbyBuilder = PlanProto.DistinctGroupbyNode.newBuilder();
    distGroupbyBuilder.setChildSeq(childIds[0]);
    if (node.getGroupbyPlan() != null) {
      distGroupbyBuilder.setGroupbyNode(buildGroupby(context, node.getGroupbyPlan()));
    }
    for (GroupbyNode subPlan : node.getSubPlans()) {
      distGroupbyBuilder.addSubPlans(buildGroupby(context, subPlan));
    }
    if (node.getGroupingColumns().length > 0) {
      distGroupbyBuilder.addAllGroupingKeys(
          ProtoUtil.<CatalogProtos.ColumnProto>toProtoObjects(node.getGroupingColumns()));
    }
    if (node.getAggFunctions().size() > 0) {
      distGroupbyBuilder.addAllAggFunctions(
          ProtoUtil.<PlanProto.EvalNodeTree>toProtoObjects(node.getAggFunctions().toArray(new ProtoObject[node.getAggFunctions().size()])));
    }
    if (node.hasTargets()) {
      distGroupbyBuilder.addAllTargets(ProtoUtil.<PlanProto.Target>toProtoObjects(node.getTargets().toArray(new ProtoObject[node.getTargets().size()])));
    }
    for (int cid : node.getResultColumnIds()) {
      distGroupbyBuilder.addResultId(cid);
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setDistinctGroupby(distGroupbyBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link SelectionNode} (WHERE filter) with its qualification expression. */
  @Override
  public LogicalNode visitFilter(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                 SelectionNode filter, Stack<LogicalNode> stack) throws TajoException {
    super.visitFilter(context, plan, block, filter, stack);
    int [] childIds = registerGetChildIds(context, filter);
    PlanProto.FilterNode.Builder filterBuilder = PlanProto.FilterNode.newBuilder();
    filterBuilder.setChildSeq(childIds[0]);
    filterBuilder.setQual(EvalNodeSerializer.serialize(filter.getQual()));
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, filter);
    nodeBuilder.setFilter(filterBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return filter;
  }
  /**
   * Serializes a {@link JoinNode} with its join type, both child sequences, the optional
   * join qualification, and the optional target list (flagged via {@code existsTargets}).
   */
  @Override
  public LogicalNode visitJoin(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode join,
                               Stack<LogicalNode> stack) throws TajoException {
    super.visitJoin(context, plan, block, join, stack);
    int [] childIds = registerGetChildIds(context, join);
    // building itself
    PlanProto.JoinNode.Builder joinBuilder = PlanProto.JoinNode.newBuilder();
    joinBuilder.setJoinType(convertJoinType(join.getJoinType()));
    joinBuilder.setLeftChildSeq(childIds[0]);
    joinBuilder.setRightChilSeq(childIds[1]); // "RightChilSeq" (sic) matches the generated proto field name
    if (join.hasJoinQual()) {
      joinBuilder.setJoinQual(EvalNodeSerializer.serialize(join.getJoinQual()));
    }
    if (join.hasTargets()) {
      joinBuilder.setExistsTargets(true);
      joinBuilder.addAllTargets(ProtoUtil.<PlanProto.Target>toProtoObjects(join.getTargets().toArray(new ProtoObject[join.getTargets().size()])));
    } else {
      joinBuilder.setExistsTargets(false);
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, join);
    nodeBuilder.setJoin(joinBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return join;
  }
  /** Serializes a {@link UnionNode} with both child sequences. */
  @Override
  public LogicalNode visitUnion(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block, UnionNode node,
                                Stack<LogicalNode> stack) throws TajoException {
    super.visitUnion(context, plan, block, node, stack);
    int [] childIds = registerGetChildIds(context, node);
    PlanProto.UnionNode.Builder unionBuilder = PlanProto.UnionNode.newBuilder();
    // NOTE(review): always serialized as UNION ALL; no distinct flag is read from the node — confirm intentional.
    unionBuilder.setAll(true);
    unionBuilder.setLeftChildSeq(childIds[0]);
    unionBuilder.setRightChildSeq(childIds[1]);
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setUnion(unionBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link ScanNode}; a leaf node, so the parent visitor is not invoked. */
  @Override
  public LogicalNode visitScan(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                               ScanNode scan, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.ScanNode.Builder scanBuilder = buildScanNode(scan);
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, scan);
    nodeBuilder.setScan(scanBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return scan;
  }
  /**
   * Builds the common scan proto shared by plain, index, and partitioned table scans:
   * table descriptor, optional alias, optional targets (flagged via {@code existTargets}),
   * optional qualification, and the broadcast / name-resolve flags.
   */
  public PlanProto.ScanNode.Builder buildScanNode(ScanNode scan) {
    PlanProto.ScanNode.Builder scanBuilder = PlanProto.ScanNode.newBuilder();
    scanBuilder.setTable(scan.getTableDesc().getProto());
    if (scan.hasAlias()) {
      scanBuilder.setAlias(scan.getAlias());
    }
    if (scan.hasTargets()) {
      scanBuilder.setExistTargets(true);
      scanBuilder.addAllTargets(ProtoUtil.<PlanProto.Target>toProtoObjects(scan.getTargets().toArray(new ProtoObject[scan.getTargets().size()])));
    } else {
      scanBuilder.setExistTargets(false);
    }
    if (scan.hasQual()) {
      scanBuilder.setQual(EvalNodeSerializer.serialize(scan.getQual()));
    }
    scanBuilder.setBroadcast(scan.isBroadcastTable());
    scanBuilder.setNameResolveBase(scan.isNameResolveBase());
    return scanBuilder;
  }
  /** Serializes an {@link IndexScanNode}: the common scan proto plus the index scan spec (key schema, path, predicates). */
  @Override
  public LogicalNode visitIndexScan(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                    IndexScanNode node, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.ScanNode.Builder scanBuilder = buildScanNode(node);
    PlanProto.IndexScanSpec.Builder indexScanSpecBuilder = PlanProto.IndexScanSpec.newBuilder();
    indexScanSpecBuilder.setKeySchema(node.getKeySchema().getProto());
    indexScanSpecBuilder.setIndexPath(node.getIndexPath().toString());
    for (SimplePredicate predicate : node.getPredicates()) {
      indexScanSpecBuilder.addPredicates(predicate.getProto());
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setScan(scanBuilder);
    nodeBuilder.setIndexScan(indexScanSpecBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link PartitionedTableScanNode}: the common scan proto plus the partition input paths. */
  @Override
  public LogicalNode visitPartitionedTableScan(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                               PartitionedTableScanNode node, Stack<LogicalNode> stack)
      throws TajoException {
    PlanProto.ScanNode.Builder scanBuilder = buildScanNode(node);
    PlanProto.PartitionScanSpec.Builder partitionScan = PlanProto.PartitionScanSpec.newBuilder();
    List<String> pathStrs = new ArrayList<>();
    if (node.getInputPaths() != null) {
      for (Path p : node.getInputPaths()) {
        pathStrs.add(p.toString());
      }
      partitionScan.addAllPaths(pathStrs);
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setScan(scanBuilder);
    nodeBuilder.setPartitionScan(partitionScan);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link TableSubQueryNode} with its table name, optional targets, and name-resolve flag. */
  @Override
  public LogicalNode visitTableSubQuery(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                        TableSubQueryNode node, Stack<LogicalNode> stack) throws TajoException {
    super.visitTableSubQuery(context, plan, block, node, stack);
    int [] childIds = registerGetChildIds(context, node);
    PlanProto.TableSubQueryNode.Builder builder = PlanProto.TableSubQueryNode.newBuilder();
    builder.setChildSeq(childIds[0]);
    builder.setTableName(node.getTableName());
    if (node.hasTargets()) {
      builder.addAllTargets(ProtoUtil.<PlanProto.Target>toProtoObjects(node.getTargets().toArray(new ProtoObject[node.getTargets().size()])));
    }
    builder.setNameResolveBase(node.isNameResolveBase());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setTableSubQuery(builder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /**
   * Serializes a {@link CreateTableNode} as three cooperating specs: the persistent-store
   * part, the store-table part, and the create-table part (tablespace, external, if-not-exists).
   */
  @Override
  public LogicalNode visitCreateTable(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                      CreateTableNode node, Stack<LogicalNode> stack) throws TajoException {
    super.visitCreateTable(context, plan, block, node, stack);
    int [] childIds = registerGetChildIds(context, node);
    PlanProto.PersistentStoreNode.Builder persistentStoreBuilder = buildPersistentStoreBuilder(node, childIds);
    PlanProto.StoreTableNodeSpec.Builder storeTableBuilder = buildStoreTableNodeSpec(node);
    PlanProto.CreateTableNodeSpec.Builder createTableBuilder = PlanProto.CreateTableNodeSpec.newBuilder();
    if (node.hasTableSpaceName()) {
      createTableBuilder.setTablespaceName(node.getTableSpaceName());
    }
    createTableBuilder.setExternal(node.isExternal());
    createTableBuilder.setIfNotExists(node.isIfNotExists());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setPersistentStore(persistentStoreBuilder);
    nodeBuilder.setStoreTable(storeTableBuilder);
    nodeBuilder.setCreateTable(createTableBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link DropTableNode} with its if-exists and purge flags. */
  @Override
  public LogicalNode visitDropTable(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                    DropTableNode node, Stack<LogicalNode> stack) {
    PlanProto.DropTableNode.Builder dropTableBuilder = PlanProto.DropTableNode.newBuilder();
    dropTableBuilder.setTableName(node.getTableName());
    dropTableBuilder.setIfExists(node.isIfExists());
    dropTableBuilder.setPurge(node.isPurge());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setDropTable(dropTableBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes an {@link AlterTablespaceNode}; only the LOCATION set-type is currently supported. */
  @Override
  public LogicalNode visitAlterTablespace(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                          AlterTablespaceNode node, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.AlterTablespaceNode.Builder alterTablespaceBuilder = PlanProto.AlterTablespaceNode.newBuilder();
    alterTablespaceBuilder.setTableSpaceName(node.getTablespaceName());
    switch (node.getSetType()) {
    case LOCATION:
      alterTablespaceBuilder.setSetType(PlanProto.AlterTablespaceNode.Type.LOCATION);
      alterTablespaceBuilder.setSetLocation(SetLocation.newBuilder().setLocation(node.getLocation()));
      break;
    default:
      throw new NotImplementedException("Unknown SET type in ALTER TABLESPACE: " + node.getSetType().name());
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setAlterTablespace(alterTablespaceBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /**
   * Serializes an {@link AlterTableNode}, dispatching on the ALTER operation type:
   * rename table/column, add column, set/unset property, add/drop/repair partition.
   */
  @Override
  public LogicalNode visitAlterTable(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                     AlterTableNode node, Stack<LogicalNode> stack) {
    PlanProto.AlterTableNode.Builder alterTableBuilder = PlanProto.AlterTableNode.newBuilder();
    alterTableBuilder.setTableName(node.getTableName());
    PlanProto.AlterTableNode.AlterPartition.Builder partitionBuilder = null;
    switch (node.getAlterTableOpType()) {
    case RENAME_TABLE:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.RENAME_TABLE);
      alterTableBuilder.setRenameTable(RenameTable.newBuilder().setNewName(node.getNewTableName()));
      break;
    case ADD_COLUMN:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.ADD_COLUMN);
      alterTableBuilder.setAddColumn(AddColumn.newBuilder().setAddColumn(node.getAddNewColumn().getProto()));
      break;
    case RENAME_COLUMN:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.RENAME_COLUMN);
      alterTableBuilder.setRenameColumn(RenameColumn.newBuilder()
          .setOldName(node.getColumnName())
          .setNewName(node.getNewColumnName()));
      break;
    case SET_PROPERTY:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.SET_PROPERTY);
      alterTableBuilder.setProperties(node.getProperties().getProto());
      break;
    case UNSET_PROPERTY:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.UNSET_PROPERTY);
      alterTableBuilder.setUnsetPropertyKeys(ProtoUtil.convertStrings(Arrays.asList(node.getUnsetPropertyKeys())));
      break;
    case ADD_PARTITION:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.ADD_PARTITION);
      partitionBuilder = PlanProto.AlterTableNode.AlterPartition.newBuilder();
      for (String columnName : node.getPartitionColumns()) {
        partitionBuilder.addColumnNames(columnName);
      }
      for (String partitionValue : node.getPartitionValues()) {
        partitionBuilder.addPartitionValues(partitionValue);
      }
      if (node.getLocation() != null) {
        partitionBuilder.setLocation(node.getLocation());
      }
      alterTableBuilder.setAlterPartition(partitionBuilder);
      break;
    case DROP_PARTITION:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.DROP_PARTITION);
      partitionBuilder = PlanProto.AlterTableNode.AlterPartition.newBuilder();
      for (String columnName : node.getPartitionColumns()) {
        partitionBuilder.addColumnNames(columnName);
      }
      for (String partitionValue : node.getPartitionValues()) {
        partitionBuilder.addPartitionValues(partitionValue);
      }
      partitionBuilder.setPurge(node.isPurge());
      alterTableBuilder.setAlterPartition(partitionBuilder);
      break;
    case REPAIR_PARTITION:
      alterTableBuilder.setSetType(PlanProto.AlterTableNode.Type.REPAIR_PARTITION);
      alterTableBuilder.setTableName(node.getTableName());
      break;
    default:
      throw new TajoRuntimeException(
          new NotImplementedException("Unknown SET type in ALTER TABLE: " + node.getAlterTableOpType().name()));
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setAlterTable(alterTableBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link TruncateTableNode} with its list of table names. */
  @Override
  public LogicalNode visitTruncateTable(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                        TruncateTableNode node, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.TruncateTableNode.Builder truncateTableBuilder = PlanProto.TruncateTableNode.newBuilder();
    truncateTableBuilder.addAllTableNames(node.getTableNames());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setTruncateTableNode(truncateTableBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /**
   * Serializes an {@link InsertNode} as three cooperating specs: the persistent-store part,
   * the store-table part, and the insert part (overwrite flag plus optional projected/target schemas).
   */
  @Override
  public LogicalNode visitInsert(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                 InsertNode node, Stack<LogicalNode> stack) throws TajoException {
    super.visitInsert(context, plan, block, node, stack);
    int [] childIds = registerGetChildIds(context, node);
    PlanProto.PersistentStoreNode.Builder persistentStoreBuilder = buildPersistentStoreBuilder(node, childIds);
    PlanProto.StoreTableNodeSpec.Builder storeTableBuilder = buildStoreTableNodeSpec(node);
    PlanProto.InsertNodeSpec.Builder insertNodeSpec = PlanProto.InsertNodeSpec.newBuilder();
    insertNodeSpec.setOverwrite(node.isOverwrite());
    if (node.hasProjectedSchema()) {
      insertNodeSpec.setProjectedSchema(node.getProjectedSchema().getProto());
    }
    if (node.hasTargetSchema()) {
      insertNodeSpec.setTargetSchema(node.getTargetSchema().getProto());
    }
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setPersistentStore(persistentStoreBuilder);
    nodeBuilder.setStoreTable(storeTableBuilder);
    nodeBuilder.setInsert(insertNodeSpec);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Builds the persistent-store spec shared by CREATE TABLE and INSERT: child seq, storage type, options. */
  private static PlanProto.PersistentStoreNode.Builder buildPersistentStoreBuilder(PersistentStoreNode node,
                                                                                  int [] childIds) {
    PlanProto.PersistentStoreNode.Builder persistentStoreBuilder = PlanProto.PersistentStoreNode.newBuilder();
    if (childIds.length > 0) {
      // Simple create table may not have any children. This should be improved at TAJO-1589.
      persistentStoreBuilder.setChildSeq(childIds[0]);
    }
    persistentStoreBuilder.setStorageType(node.getStorageType());
    if (node.hasOptions()) {
      persistentStoreBuilder.setTableProperties(node.getOptions().getProto());
    }
    return persistentStoreBuilder;
  }
  /** Builds the store-table spec shared by CREATE TABLE and INSERT; every field is optional on the node. */
  private static PlanProto.StoreTableNodeSpec.Builder buildStoreTableNodeSpec(StoreTableNode node) {
    PlanProto.StoreTableNodeSpec.Builder storeTableBuilder = PlanProto.StoreTableNodeSpec.newBuilder();
    if (node.hasTableName()) { // It will be false if node is for INSERT INTO LOCATION '...'
      storeTableBuilder.setTableName(node.getTableName());
    }
    if (node.hasUri()) {
      storeTableBuilder.setUri(node.getUri().toString());
    }
    if (node.hasTableSchema()) {
      storeTableBuilder.setTableSchema(node.getTableSchema().getProto());
    }
    if (node.hasPartition()) {
      storeTableBuilder.setPartitionMethod(node.getPartitionMethod().getProto());
    }
    return storeTableBuilder;
  }
  /** Serializes a {@link CreateDatabaseNode} with its database name and if-not-exists flag. */
  @Override
  public LogicalNode visitCreateDatabase(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                         CreateDatabaseNode node, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.CreateDatabaseNode.Builder createDatabaseBuilder = PlanProto.CreateDatabaseNode.newBuilder();
    createDatabaseBuilder.setDbName(node.getDatabaseName());
    createDatabaseBuilder.setIfNotExists(node.isIfNotExists());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setCreateDatabase(createDatabaseBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link DropDatabaseNode} with its database name and if-exists flag. */
  @Override
  public LogicalNode visitDropDatabase(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                       DropDatabaseNode node, Stack<LogicalNode> stack) throws TajoException {
    PlanProto.DropDatabaseNode.Builder dropDatabaseBuilder = PlanProto.DropDatabaseNode.newBuilder();
    dropDatabaseBuilder.setDbName(node.getDatabaseName());
    dropDatabaseBuilder.setIfExists(node.isIfExists());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setDropDatabase(dropDatabaseBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /**
   * Serializes a {@link CreateIndexNode} with the index name, method, path, key sort specs,
   * target relation schema, unique/clustered/external flags, and optional index properties.
   */
  @Override
  public LogicalNode visitCreateIndex(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                      CreateIndexNode node, Stack<LogicalNode> stack) throws TajoException {
    // NOTE(review): a fresh Stack is passed to super instead of the caller's stack — confirm intentional.
    super.visitCreateIndex(context, plan, block, node, new Stack<>());
    PlanProto.CreateIndexNode.Builder createIndexBuilder = PlanProto.CreateIndexNode.newBuilder();
    int [] childIds = registerGetChildIds(context, node);
    createIndexBuilder.setChildSeq(childIds[0]);
    createIndexBuilder.setIndexName(node.getIndexName());
    createIndexBuilder.setIndexMethod(node.getIndexMethod());
    createIndexBuilder.setIndexPath(node.getIndexPath().toString());
    for (SortSpec sortSpec : node.getKeySortSpecs()) {
      createIndexBuilder.addKeySortSpecs(sortSpec.getProto());
    }
    createIndexBuilder.setTargetRelationSchema(node.getTargetRelationSchema().getProto());
    createIndexBuilder.setIsUnique(node.isUnique());
    createIndexBuilder.setIsClustered(node.isClustered());
    if (node.hasOptions()) {
      createIndexBuilder.setIndexProperties(node.getOptions().getProto());
    }
    createIndexBuilder.setIsExternal(node.isExternal());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setCreateIndex(createIndexBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Serializes a {@link DropIndexNode} with its index name. */
  @Override
  public LogicalNode visitDropIndex(SerializeContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                    DropIndexNode node, Stack<LogicalNode> stack) {
    PlanProto.DropIndexNode.Builder dropIndexBuilder = PlanProto.DropIndexNode.newBuilder();
    dropIndexBuilder.setIndexName(node.getIndexName());
    PlanProto.LogicalNode.Builder nodeBuilder = createNodeBuilder(context, node);
    nodeBuilder.setDropIndex(dropIndexBuilder);
    context.treeBuilder.addNodes(nodeBuilder);
    return node;
  }
  /** Maps a logical {@link NodeType} to its proto counterpart; relies on identical enum constant names. */
  public static PlanProto.NodeType convertType(NodeType type) {
    return PlanProto.NodeType.valueOf(type.name());
  }
  /**
   * Maps an algebra {@link JoinType} to its proto counterpart.
   *
   * @throws RuntimeException if the join type has no proto mapping
   */
  public static PlanProto.JoinType convertJoinType(JoinType type) {
    switch (type) {
    case CROSS:
      return PlanProto.JoinType.CROSS_JOIN;
    case INNER:
      return PlanProto.JoinType.INNER_JOIN;
    case LEFT_OUTER:
      return PlanProto.JoinType.LEFT_OUTER_JOIN;
    case RIGHT_OUTER:
      return PlanProto.JoinType.RIGHT_OUTER_JOIN;
    case FULL_OUTER:
      return PlanProto.JoinType.FULL_OUTER_JOIN;
    case LEFT_SEMI:
      return PlanProto.JoinType.LEFT_SEMI_JOIN;
    case RIGHT_SEMI:
      return PlanProto.JoinType.RIGHT_SEMI_JOIN;
    case LEFT_ANTI:
      return PlanProto.JoinType.LEFT_ANTI_JOIN;
    case RIGHT_ANTI:
      return PlanProto.JoinType.RIGHT_ANTI_JOIN;
    case UNION:
      return PlanProto.JoinType.UNION_JOIN;
    default:
      throw new RuntimeException("Unknown JoinType: " + type.name());
    }
  }
  /** Serializes a projection {@link Target} (evaluation tree plus optional alias) into its proto form. */
  public static PlanProto.Target convertTarget(Target target) {
    PlanProto.Target.Builder targetBuilder = PlanProto.Target.newBuilder();
    targetBuilder.setExpr(EvalNodeSerializer.serialize((EvalNode) target.getEvalTree()));
    if (target.hasAlias()) {
      targetBuilder.setAlias(target.getAlias());
    }
    return targetBuilder.build();
  }
  /**
   * Looks up the visit-sequence ids of {@code node}'s children; due to the postfix
   * traversal, children are expected to already be registered in the id map.
   */
  private int [] registerGetChildIds(SerializeContext context, LogicalNode node) {
    int [] childIds = new int[node.childNum()];
    for (int i = 0; i < node.childNum(); i++) {
      if (node.getChild(i) != null && context.idMap.containsKey(node.getChild(i).getPID())) {
        childIds[i] = context.idMap.get(node.getChild(i).getPID());
      } else {
        // NOTE(review): a fresh sequence id is consumed without registering it in idMap — confirm intentional.
        childIds[i] = context.seqId++;
      }
    }
    return childIds;
  }
}
|
googleapis/google-cloud-java | 35,260 | java-oracledatabase/proto-google-cloud-oracledatabase-v1/src/main/java/com/google/cloud/oracledatabase/v1/DeleteCloudExadataInfrastructureRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/oracledatabase/v1/oracledatabase.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.oracledatabase.v1;
/**
 *
 *
 * <pre>
 * The request for `CloudExadataInfrastructure.Delete`.
 * </pre>
 *
 * <p>NOTE(review): this class is emitted by the protocol buffer compiler (see the "DO NOT EDIT"
 * header). Behavioral changes belong in the .proto source followed by regeneration, never here.
 *
 * Protobuf type {@code google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest}
 */
public final class DeleteCloudExadataInfrastructureRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest)
    DeleteCloudExadataInfrastructureRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeleteCloudExadataInfrastructureRequest.newBuilder() to construct.
  private DeleteCloudExadataInfrastructureRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; string fields start at proto3 defaults.
  private DeleteCloudExadataInfrastructureRequest() {
    name_ = "";
    requestId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteCloudExadataInfrastructureRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.oracledatabase.v1.V1mainProto
        .internal_static_google_cloud_oracledatabase_v1_DeleteCloudExadataInfrastructureRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.oracledatabase.v1.V1mainProto
        .internal_static_google_cloud_oracledatabase_v1_DeleteCloudExadataInfrastructureRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest.class,
            com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest.Builder
                .class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted to String on first getName() call.
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Exadata Infrastructure in the following
   * format:
   * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Exadata Infrastructure in the following
   * format:
   * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REQUEST_ID_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";
  /**
   *
   *
   * <pre>
   * Optional. An optional ID to identify the request. This value is used to
   * identify duplicate requests. If you make a request with the same request ID
   * and the original request is still in progress or completed, the server
   * ignores the second request. This prevents clients from
   * accidentally creating duplicate commitments.
   *
   * The request ID must be a valid UUID with the exception that zero UUID is
   * not supported (00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>
   * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
   * </code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. An optional ID to identify the request. This value is used to
   * identify duplicate requests. If you make a request with the same request ID
   * and the original request is still in progress or completed, the server
   * ignores the second request. This prevents clients from
   * accidentally creating duplicate commitments.
   *
   * The request ID must be a valid UUID with the exception that zero UUID is
   * not supported (00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>
   * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
   * </code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FORCE_FIELD_NUMBER = 3;
  private boolean force_ = false;
  /**
   *
   *
   * <pre>
   * Optional. If set to true, all VM clusters for this Exadata Infrastructure
   * will be deleted. An Exadata Infrastructure can only be deleted once all its
   * VM clusters have been deleted.
   * </pre>
   *
   * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The force.
   */
  @java.lang.Override
  public boolean getForce() {
    return force_;
  }
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
    }
    if (force_ != false) {
      output.writeBool(3, force_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
    }
    if (force_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest other =
        (com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (getForce() != other.getForce()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (37 * hash) + FORCE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request for `CloudExadataInfrastructure.Delete`.
   * </pre>
   *
   * Protobuf type {@code google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest)
      com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.oracledatabase.v1.V1mainProto
          .internal_static_google_cloud_oracledatabase_v1_DeleteCloudExadataInfrastructureRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.oracledatabase.v1.V1mainProto
          .internal_static_google_cloud_oracledatabase_v1_DeleteCloudExadataInfrastructureRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest.class,
              com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      requestId_ = "";
      force_ = false;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.oracledatabase.v1.V1mainProto
          .internal_static_google_cloud_oracledatabase_v1_DeleteCloudExadataInfrastructureRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
        getDefaultInstanceForType() {
      return com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest build() {
      com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
        buildPartial() {
      com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest result =
          new com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set from the builder into the message.
    private void buildPartial0(
        com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requestId_ = requestId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.force_ = force_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest) {
        return mergeFrom(
            (com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest other) {
      if (other
          == com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
              .getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getForce() != false) {
        setForce(other.getForce());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                force_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = name, 0x2 = request_id, 0x4 = force.
    private int bitField0_;
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of the Cloud Exadata Infrastructure in the following
     * format:
     * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the Cloud Exadata Infrastructure in the following
     * format:
     * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the Cloud Exadata Infrastructure in the following
     * format:
     * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the Cloud Exadata Infrastructure in the following
     * format:
     * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the Cloud Exadata Infrastructure in the following
     * format:
     * projects/{project}/locations/{location}/cloudExadataInfrastructures/{cloud_exadata_infrastructure}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object requestId_ = "";
    /**
     *
     *
     * <pre>
     * Optional. An optional ID to identify the request. This value is used to
     * identify duplicate requests. If you make a request with the same request ID
     * and the original request is still in progress or completed, the server
     * ignores the second request. This prevents clients from
     * accidentally creating duplicate commitments.
     *
     * The request ID must be a valid UUID with the exception that zero UUID is
     * not supported (00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @return The requestId.
     */
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. An optional ID to identify the request. This value is used to
     * identify duplicate requests. If you make a request with the same request ID
     * and the original request is still in progress or completed, the server
     * ignores the second request. This prevents clients from
     * accidentally creating duplicate commitments.
     *
     * The request ID must be a valid UUID with the exception that zero UUID is
     * not supported (00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @return The bytes for requestId.
     */
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. An optional ID to identify the request. This value is used to
     * identify duplicate requests. If you make a request with the same request ID
     * and the original request is still in progress or completed, the server
     * ignores the second request. This prevents clients from
     * accidentally creating duplicate commitments.
     *
     * The request ID must be a valid UUID with the exception that zero UUID is
     * not supported (00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @param value The requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. An optional ID to identify the request. This value is used to
     * identify duplicate requests. If you make a request with the same request ID
     * and the original request is still in progress or completed, the server
     * ignores the second request. This prevents clients from
     * accidentally creating duplicate commitments.
     *
     * The request ID must be a valid UUID with the exception that zero UUID is
     * not supported (00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. An optional ID to identify the request. This value is used to
     * identify duplicate requests. If you make a request with the same request ID
     * and the original request is still in progress or completed, the server
     * ignores the second request. This prevents clients from
     * accidentally creating duplicate commitments.
     *
     * The request ID must be a valid UUID with the exception that zero UUID is
     * not supported (00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>
     * string request_id = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_info) = { ... }
     * </code>
     *
     * @param value The bytes for requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private boolean force_;
    /**
     *
     *
     * <pre>
     * Optional. If set to true, all VM clusters for this Exadata Infrastructure
     * will be deleted. An Exadata Infrastructure can only be deleted once all its
     * VM clusters have been deleted.
     * </pre>
     *
     * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The force.
     */
    @java.lang.Override
    public boolean getForce() {
      return force_;
    }
    /**
     *
     *
     * <pre>
     * Optional. If set to true, all VM clusters for this Exadata Infrastructure
     * will be deleted. An Exadata Infrastructure can only be deleted once all its
     * VM clusters have been deleted.
     * </pre>
     *
     * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The force to set.
     * @return This builder for chaining.
     */
    public Builder setForce(boolean value) {
      force_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. If set to true, all VM clusters for this Exadata Infrastructure
     * will be deleted. An Exadata Infrastructure can only be deleted once all its
     * VM clusters have been deleted.
     * </pre>
     *
     * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearForce() {
      bitField0_ = (bitField0_ & ~0x00000004);
      force_ = false;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest)
  // Singleton default instance; all fields hold their proto3 defaults.
  private static final com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest();
  }
  public static com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegating to Builder.mergeFrom; returns the partially built message on error paths.
  private static final com.google.protobuf.Parser<DeleteCloudExadataInfrastructureRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteCloudExadataInfrastructureRequest>() {
        @java.lang.Override
        public DeleteCloudExadataInfrastructureRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<DeleteCloudExadataInfrastructureRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DeleteCloudExadataInfrastructureRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.oracledatabase.v1.DeleteCloudExadataInfrastructureRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,534 | java-secretmanager/google-cloud-secretmanager/src/main/java/com/google/cloud/secretmanager/v1beta2/stub/GrpcSecretManagerServiceStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.secretmanager.v1beta2.stub;
import static com.google.cloud.secretmanager.v1beta2.SecretManagerServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.secretmanager.v1beta2.SecretManagerServiceClient.ListSecretVersionsPagedResponse;
import static com.google.cloud.secretmanager.v1beta2.SecretManagerServiceClient.ListSecretsPagedResponse;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.secretmanager.v1beta2.AccessSecretVersionRequest;
import com.google.cloud.secretmanager.v1beta2.AccessSecretVersionResponse;
import com.google.cloud.secretmanager.v1beta2.AddSecretVersionRequest;
import com.google.cloud.secretmanager.v1beta2.CreateSecretRequest;
import com.google.cloud.secretmanager.v1beta2.DeleteSecretRequest;
import com.google.cloud.secretmanager.v1beta2.DestroySecretVersionRequest;
import com.google.cloud.secretmanager.v1beta2.DisableSecretVersionRequest;
import com.google.cloud.secretmanager.v1beta2.EnableSecretVersionRequest;
import com.google.cloud.secretmanager.v1beta2.GetSecretRequest;
import com.google.cloud.secretmanager.v1beta2.GetSecretVersionRequest;
import com.google.cloud.secretmanager.v1beta2.ListSecretVersionsRequest;
import com.google.cloud.secretmanager.v1beta2.ListSecretVersionsResponse;
import com.google.cloud.secretmanager.v1beta2.ListSecretsRequest;
import com.google.cloud.secretmanager.v1beta2.ListSecretsResponse;
import com.google.cloud.secretmanager.v1beta2.Secret;
import com.google.cloud.secretmanager.v1beta2.SecretVersion;
import com.google.cloud.secretmanager.v1beta2.UpdateSecretRequest;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the SecretManagerService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class GrpcSecretManagerServiceStub extends SecretManagerServiceStub {
  // ---------------------------------------------------------------------------------------------
  // Static gRPC method descriptors.
  // One MethodDescriptor per RPC of SecretManagerService (plus the mixed-in
  // google.cloud.location.Locations service). Each descriptor pairs the fully qualified method
  // name with protobuf marshallers for the request/response types; they are immutable and shared
  // by every stub instance, so they are built once as class constants.
  // ---------------------------------------------------------------------------------------------
  private static final MethodDescriptor<ListSecretsRequest, ListSecretsResponse>
      listSecretsMethodDescriptor =
          MethodDescriptor.<ListSecretsRequest, ListSecretsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/ListSecrets")
              .setRequestMarshaller(ProtoUtils.marshaller(ListSecretsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListSecretsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<CreateSecretRequest, Secret> createSecretMethodDescriptor =
      MethodDescriptor.<CreateSecretRequest, Secret>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.secretmanager.v1beta2.SecretManagerService/CreateSecret")
          .setRequestMarshaller(ProtoUtils.marshaller(CreateSecretRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Secret.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<AddSecretVersionRequest, SecretVersion>
      addSecretVersionMethodDescriptor =
          MethodDescriptor.<AddSecretVersionRequest, SecretVersion>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/AddSecretVersion")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(AddSecretVersionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetSecretRequest, Secret> getSecretMethodDescriptor =
      MethodDescriptor.<GetSecretRequest, Secret>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.secretmanager.v1beta2.SecretManagerService/GetSecret")
          .setRequestMarshaller(ProtoUtils.marshaller(GetSecretRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Secret.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<UpdateSecretRequest, Secret> updateSecretMethodDescriptor =
      MethodDescriptor.<UpdateSecretRequest, Secret>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.secretmanager.v1beta2.SecretManagerService/UpdateSecret")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateSecretRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Secret.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<DeleteSecretRequest, Empty> deleteSecretMethodDescriptor =
      MethodDescriptor.<DeleteSecretRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.secretmanager.v1beta2.SecretManagerService/DeleteSecret")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteSecretRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<ListSecretVersionsRequest, ListSecretVersionsResponse>
      listSecretVersionsMethodDescriptor =
          MethodDescriptor.<ListSecretVersionsRequest, ListSecretVersionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/ListSecretVersions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListSecretVersionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListSecretVersionsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetSecretVersionRequest, SecretVersion>
      getSecretVersionMethodDescriptor =
          MethodDescriptor.<GetSecretVersionRequest, SecretVersion>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/GetSecretVersion")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetSecretVersionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<AccessSecretVersionRequest, AccessSecretVersionResponse>
      accessSecretVersionMethodDescriptor =
          MethodDescriptor.<AccessSecretVersionRequest, AccessSecretVersionResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/AccessSecretVersion")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(AccessSecretVersionRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(AccessSecretVersionResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DisableSecretVersionRequest, SecretVersion>
      disableSecretVersionMethodDescriptor =
          MethodDescriptor.<DisableSecretVersionRequest, SecretVersion>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/DisableSecretVersion")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DisableSecretVersionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<EnableSecretVersionRequest, SecretVersion>
      enableSecretVersionMethodDescriptor =
          MethodDescriptor.<EnableSecretVersionRequest, SecretVersion>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/EnableSecretVersion")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(EnableSecretVersionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DestroySecretVersionRequest, SecretVersion>
      destroySecretVersionMethodDescriptor =
          MethodDescriptor.<DestroySecretVersionRequest, SecretVersion>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/DestroySecretVersion")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DestroySecretVersionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(SecretVersion.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor =
      MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.secretmanager.v1beta2.SecretManagerService/SetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor =
      MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.secretmanager.v1beta2.SecretManagerService/GetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.secretmanager.v1beta2.SecretManagerService/TestIamPermissions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor =
      MethodDescriptor.<GetLocationRequest, Location>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.location.Locations/GetLocation")
          .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // ---------------------------------------------------------------------------------------------
  // Per-instance callables.
  // One UnaryCallable per RPC, plus a "paged" variant for each paginated list RPC. All are wired
  // up once in the constructor and are final thereafter.
  // ---------------------------------------------------------------------------------------------
  private final UnaryCallable<ListSecretsRequest, ListSecretsResponse> listSecretsCallable;
  private final UnaryCallable<ListSecretsRequest, ListSecretsPagedResponse>
      listSecretsPagedCallable;
  private final UnaryCallable<CreateSecretRequest, Secret> createSecretCallable;
  private final UnaryCallable<AddSecretVersionRequest, SecretVersion> addSecretVersionCallable;
  private final UnaryCallable<GetSecretRequest, Secret> getSecretCallable;
  private final UnaryCallable<UpdateSecretRequest, Secret> updateSecretCallable;
  private final UnaryCallable<DeleteSecretRequest, Empty> deleteSecretCallable;
  private final UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsResponse>
      listSecretVersionsCallable;
  private final UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsPagedResponse>
      listSecretVersionsPagedCallable;
  private final UnaryCallable<GetSecretVersionRequest, SecretVersion> getSecretVersionCallable;
  private final UnaryCallable<AccessSecretVersionRequest, AccessSecretVersionResponse>
      accessSecretVersionCallable;
  private final UnaryCallable<DisableSecretVersionRequest, SecretVersion>
      disableSecretVersionCallable;
  private final UnaryCallable<EnableSecretVersionRequest, SecretVersion>
      enableSecretVersionCallable;
  private final UnaryCallable<DestroySecretVersionRequest, SecretVersion>
      destroySecretVersionCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
  // Aggregates everything that must be released on close() (see close() below).
  private final BackgroundResource backgroundResources;
  // Stub for long-running-operations RPCs; created in the constructor, exposed via
  // getOperationsStub().
  private final GrpcOperationsStub operationsStub;
  // Factory that turns transport settings + call settings into callables.
  private final GrpcStubCallableFactory callableFactory;
  // --- Static factory methods; preferred over the protected constructors. ---
  public static final GrpcSecretManagerServiceStub create(SecretManagerServiceStubSettings settings)
      throws IOException {
    return new GrpcSecretManagerServiceStub(settings, ClientContext.create(settings));
  }
  public static final GrpcSecretManagerServiceStub create(ClientContext clientContext)
      throws IOException {
    // Uses default settings; only the supplied ClientContext is caller-provided.
    return new GrpcSecretManagerServiceStub(
        SecretManagerServiceStubSettings.newBuilder().build(), clientContext);
  }
  public static final GrpcSecretManagerServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcSecretManagerServiceStub(
        SecretManagerServiceStubSettings.newBuilder().build(), clientContext, callableFactory);
  }
  /**
   * Constructs an instance of GrpcSecretManagerServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcSecretManagerServiceStub(
      SecretManagerServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new GrpcSecretManagerServiceCallableFactory());
  }
  /**
   * Constructs an instance of GrpcSecretManagerServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcSecretManagerServiceStub(
      SecretManagerServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // For each RPC, build GrpcCallSettings pairing its method descriptor with a params extractor.
    // The extractor copies the request's routing field (e.g. "parent", "name", "resource") into
    // request params; the extracted field mirrors the URL path variable of the RPC.
    GrpcCallSettings<ListSecretsRequest, ListSecretsResponse> listSecretsTransportSettings =
        GrpcCallSettings.<ListSecretsRequest, ListSecretsResponse>newBuilder()
            .setMethodDescriptor(listSecretsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateSecretRequest, Secret> createSecretTransportSettings =
        GrpcCallSettings.<CreateSecretRequest, Secret>newBuilder()
            .setMethodDescriptor(createSecretMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<AddSecretVersionRequest, SecretVersion> addSecretVersionTransportSettings =
        GrpcCallSettings.<AddSecretVersionRequest, SecretVersion>newBuilder()
            .setMethodDescriptor(addSecretVersionMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetSecretRequest, Secret> getSecretTransportSettings =
        GrpcCallSettings.<GetSecretRequest, Secret>newBuilder()
            .setMethodDescriptor(getSecretMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateSecretRequest, Secret> updateSecretTransportSettings =
        GrpcCallSettings.<UpdateSecretRequest, Secret>newBuilder()
            .setMethodDescriptor(updateSecretMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  // Routing key is nested: the name of the secret being updated.
                  builder.add("secret.name", String.valueOf(request.getSecret().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteSecretRequest, Empty> deleteSecretTransportSettings =
        GrpcCallSettings.<DeleteSecretRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteSecretMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListSecretVersionsRequest, ListSecretVersionsResponse>
        listSecretVersionsTransportSettings =
            GrpcCallSettings.<ListSecretVersionsRequest, ListSecretVersionsResponse>newBuilder()
                .setMethodDescriptor(listSecretVersionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetSecretVersionRequest, SecretVersion> getSecretVersionTransportSettings =
        GrpcCallSettings.<GetSecretVersionRequest, SecretVersion>newBuilder()
            .setMethodDescriptor(getSecretVersionMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<AccessSecretVersionRequest, AccessSecretVersionResponse>
        accessSecretVersionTransportSettings =
            GrpcCallSettings.<AccessSecretVersionRequest, AccessSecretVersionResponse>newBuilder()
                .setMethodDescriptor(accessSecretVersionMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<DisableSecretVersionRequest, SecretVersion>
        disableSecretVersionTransportSettings =
            GrpcCallSettings.<DisableSecretVersionRequest, SecretVersion>newBuilder()
                .setMethodDescriptor(disableSecretVersionMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<EnableSecretVersionRequest, SecretVersion>
        enableSecretVersionTransportSettings =
            GrpcCallSettings.<EnableSecretVersionRequest, SecretVersion>newBuilder()
                .setMethodDescriptor(enableSecretVersionMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<DestroySecretVersionRequest, SecretVersion>
        destroySecretVersionTransportSettings =
            GrpcCallSettings.<DestroySecretVersionRequest, SecretVersion>newBuilder()
                .setMethodDescriptor(destroySecretVersionMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("resource", String.valueOf(request.getResource()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
        GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setMethodDescriptor(listLocationsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    // Combine each transport settings object with the retry/timeout settings from `settings` to
    // produce the final callables. List RPCs get both a plain and a paged callable built from the
    // same transport settings.
    this.listSecretsCallable =
        callableFactory.createUnaryCallable(
            listSecretsTransportSettings, settings.listSecretsSettings(), clientContext);
    this.listSecretsPagedCallable =
        callableFactory.createPagedCallable(
            listSecretsTransportSettings, settings.listSecretsSettings(), clientContext);
    this.createSecretCallable =
        callableFactory.createUnaryCallable(
            createSecretTransportSettings, settings.createSecretSettings(), clientContext);
    this.addSecretVersionCallable =
        callableFactory.createUnaryCallable(
            addSecretVersionTransportSettings, settings.addSecretVersionSettings(), clientContext);
    this.getSecretCallable =
        callableFactory.createUnaryCallable(
            getSecretTransportSettings, settings.getSecretSettings(), clientContext);
    this.updateSecretCallable =
        callableFactory.createUnaryCallable(
            updateSecretTransportSettings, settings.updateSecretSettings(), clientContext);
    this.deleteSecretCallable =
        callableFactory.createUnaryCallable(
            deleteSecretTransportSettings, settings.deleteSecretSettings(), clientContext);
    this.listSecretVersionsCallable =
        callableFactory.createUnaryCallable(
            listSecretVersionsTransportSettings,
            settings.listSecretVersionsSettings(),
            clientContext);
    this.listSecretVersionsPagedCallable =
        callableFactory.createPagedCallable(
            listSecretVersionsTransportSettings,
            settings.listSecretVersionsSettings(),
            clientContext);
    this.getSecretVersionCallable =
        callableFactory.createUnaryCallable(
            getSecretVersionTransportSettings, settings.getSecretVersionSettings(), clientContext);
    this.accessSecretVersionCallable =
        callableFactory.createUnaryCallable(
            accessSecretVersionTransportSettings,
            settings.accessSecretVersionSettings(),
            clientContext);
    this.disableSecretVersionCallable =
        callableFactory.createUnaryCallable(
            disableSecretVersionTransportSettings,
            settings.disableSecretVersionSettings(),
            clientContext);
    this.enableSecretVersionCallable =
        callableFactory.createUnaryCallable(
            enableSecretVersionTransportSettings,
            settings.enableSecretVersionSettings(),
            clientContext);
    this.destroySecretVersionCallable =
        callableFactory.createUnaryCallable(
            destroySecretVersionTransportSettings,
            settings.destroySecretVersionSettings(),
            clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);
    // Snapshot the context's background resources so close()/shutdown() can release them.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  // Returns the stub used for long-running-operations RPCs.
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // --- Callable accessors: each simply exposes the callable wired up in the constructor. ---
  @Override
  public UnaryCallable<ListSecretsRequest, ListSecretsResponse> listSecretsCallable() {
    return listSecretsCallable;
  }
  @Override
  public UnaryCallable<ListSecretsRequest, ListSecretsPagedResponse> listSecretsPagedCallable() {
    return listSecretsPagedCallable;
  }
  @Override
  public UnaryCallable<CreateSecretRequest, Secret> createSecretCallable() {
    return createSecretCallable;
  }
  @Override
  public UnaryCallable<AddSecretVersionRequest, SecretVersion> addSecretVersionCallable() {
    return addSecretVersionCallable;
  }
  @Override
  public UnaryCallable<GetSecretRequest, Secret> getSecretCallable() {
    return getSecretCallable;
  }
  @Override
  public UnaryCallable<UpdateSecretRequest, Secret> updateSecretCallable() {
    return updateSecretCallable;
  }
  @Override
  public UnaryCallable<DeleteSecretRequest, Empty> deleteSecretCallable() {
    return deleteSecretCallable;
  }
  @Override
  public UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsResponse>
      listSecretVersionsCallable() {
    return listSecretVersionsCallable;
  }
  @Override
  public UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsPagedResponse>
      listSecretVersionsPagedCallable() {
    return listSecretVersionsPagedCallable;
  }
  @Override
  public UnaryCallable<GetSecretVersionRequest, SecretVersion> getSecretVersionCallable() {
    return getSecretVersionCallable;
  }
  @Override
  public UnaryCallable<AccessSecretVersionRequest, AccessSecretVersionResponse>
      accessSecretVersionCallable() {
    return accessSecretVersionCallable;
  }
  @Override
  public UnaryCallable<DisableSecretVersionRequest, SecretVersion> disableSecretVersionCallable() {
    return disableSecretVersionCallable;
  }
  @Override
  public UnaryCallable<EnableSecretVersionRequest, SecretVersion> enableSecretVersionCallable() {
    return enableSecretVersionCallable;
  }
  @Override
  public UnaryCallable<DestroySecretVersionRequest, SecretVersion> destroySecretVersionCallable() {
    return destroySecretVersionCallable;
  }
  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }
  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }
  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }
  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }
  // --- Lifecycle: all shutdown/termination state is delegated to backgroundResources. ---
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Unchecked exceptions propagate unchanged.
      throw e;
    } catch (Exception e) {
      // Checked exceptions are wrapped because close() does not declare any.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
// ==== End of generated file: GrpcSecretManagerServiceStub.java (googleapis/google-cloud-java) ====
// ==== Begin generated file: java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/CreateRollupPropertyRequest.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for CreateRollupProperty RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.CreateRollupPropertyRequest}
*/
public final class CreateRollupPropertyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.CreateRollupPropertyRequest)
CreateRollupPropertyRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateRollupPropertyRequest.newBuilder() to construct.
  private CreateRollupPropertyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; initializes the repeated string field to
  // the shared empty list.
  private CreateRollupPropertyRequest() {
    sourceProperties_ = com.google.protobuf.LazyStringArrayList.emptyList();
  }
  // Called by the protobuf runtime to create fresh instances during parsing.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateRollupPropertyRequest();
  }
  // Returns the protobuf Descriptor for this message type, as registered in AnalyticsAdminProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_CreateRollupPropertyRequest_descriptor;
  }
  // Wires field accessors to the generated descriptor table for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_CreateRollupPropertyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.class,
            com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.Builder.class);
  }
  // Presence bits for optional message fields; bit 0 tracks rollup_property.
  private int bitField0_;
  public static final int ROLLUP_PROPERTY_FIELD_NUMBER = 1;
  private com.google.analytics.admin.v1alpha.Property rollupProperty_;
  /**
   *
   *
   * <pre>
   * Required. The roll-up property to create.
   * </pre>
   *
   * <code>
   * .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the rollupProperty field is set.
   */
  @java.lang.Override
  public boolean hasRollupProperty() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The roll-up property to create.
   * </pre>
   *
   * <code>
   * .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The rollupProperty.
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.Property getRollupProperty() {
    // Falls back to the default instance when the field was never set (never returns null).
    return rollupProperty_ == null
        ? com.google.analytics.admin.v1alpha.Property.getDefaultInstance()
        : rollupProperty_;
  }
  /**
   *
   *
   * <pre>
   * Required. The roll-up property to create.
   * </pre>
   *
   * <code>
   * .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.PropertyOrBuilder getRollupPropertyOrBuilder() {
    return rollupProperty_ == null
        ? com.google.analytics.admin.v1alpha.Property.getDefaultInstance()
        : rollupProperty_;
  }
  public static final int SOURCE_PROPERTIES_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList sourceProperties_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   *
   *
   * <pre>
   * Optional. The resource names of properties that will be sources to the
   * created roll-up property.
   * </pre>
   *
   * <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return A list containing the sourceProperties.
   */
  public com.google.protobuf.ProtocolStringList getSourcePropertiesList() {
    return sourceProperties_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The resource names of properties that will be sources to the
   * created roll-up property.
   * </pre>
   *
   * <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The count of sourceProperties.
   */
  public int getSourcePropertiesCount() {
    return sourceProperties_.size();
  }
  /**
   *
   *
   * <pre>
   * Optional. The resource names of properties that will be sources to the
   * created roll-up property.
   * </pre>
   *
   * <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param index The index of the element to return.
   * @return The sourceProperties at the given index.
   */
  public java.lang.String getSourceProperties(int index) {
    return sourceProperties_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Optional. The resource names of properties that will be sources to the
   * created roll-up property.
   * </pre>
   *
   * <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the sourceProperties at the given index.
   */
  public com.google.protobuf.ByteString getSourcePropertiesBytes(int index) {
    return sourceProperties_.getByteString(index);
  }
  // Tri-state memo for isInitialized(): -1 = not yet computed, 0 = known false, 1 = known true.
  private byte memoizedIsInitialized = -1;
  /**
   * Always returns {@code true}: proto3 messages have no required fields, so this message is
   * structurally valid in every state. The result is memoized on first call.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields to verify — cache the positive result and return.
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in protobuf wire format, writing only fields that
   * are present.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Bit 0 of bitField0_ tracks explicit presence of field 1 (rollup_property).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getRollupProperty());
    }
    // Field 2 (source_properties): each repeated string is written as its own
    // length-delimited entry; getRaw avoids forcing a ByteString -> String conversion.
    for (int i = 0; i < sourceProperties_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceProperties_.getRaw(i));
    }
    // Re-emit fields that were unrecognized at parse time (forward compatibility).
    getUnknownFields().writeTo(output);
  }
  /**
   * Returns the exact number of bytes {@link #writeTo} will produce. The result is memoized in
   * {@code memoizedSize} ({@code -1} means "not yet computed"); safe because the message is
   * immutable once built.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      // Field 1: tag + length-prefixed submessage.
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRollupProperty());
    }
    {
      int dataSize = 0;
      for (int i = 0; i < sourceProperties_.size(); i++) {
        dataSize += computeStringSizeNoTag(sourceProperties_.getRaw(i));
      }
      size += dataSize;
      // Field number 2 encodes as a 1-byte tag, paid once per repeated element.
      size += 1 * getSourcePropertiesList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-by-field structural equality: presence of {@code rollup_property} must match before its
   * value is compared, and unknown fields participate so two messages parsed from different bytes
   * are never spuriously equal.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest)) {
      // Defer to superclass for non-matching types (handles dynamic messages).
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest other =
        (com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest) obj;
    if (hasRollupProperty() != other.hasRollupProperty()) return false;
    if (hasRollupProperty()) {
      if (!getRollupProperty().equals(other.getRollupProperty())) return false;
    }
    if (!getSourcePropertiesList().equals(other.getSourcePropertiesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}, memoized in {@code memoizedHashCode}
   * ({@code 0} doubles as the "not yet computed" sentinel). Only fields that are present
   * contribute, mirroring the presence checks in equals().
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    // Seed with the descriptor so different message types with identical fields differ.
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasRollupProperty()) {
      // Mix the field number before the value so field layout affects the hash.
      hash = (37 * hash) + ROLLUP_PROPERTY_FIELD_NUMBER;
      hash = (53 * hash) + getRollupProperty().hashCode();
    }
    if (getSourcePropertiesCount() > 0) {
      hash = (37 * hash) + SOURCE_PROPERTIES_FIELD_NUMBER;
      hash = (53 * hash) + getSourcePropertiesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for CreateRollupProperty RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.CreateRollupPropertyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.CreateRollupPropertyRequest)
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_CreateRollupPropertyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_CreateRollupPropertyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.class,
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRollupPropertyFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
rollupProperty_ = null;
if (rollupPropertyBuilder_ != null) {
rollupPropertyBuilder_.dispose();
rollupPropertyBuilder_ = null;
}
sourceProperties_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_CreateRollupPropertyRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest
getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest build() {
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest buildPartial() {
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest result =
new com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.rollupProperty_ =
rollupPropertyBuilder_ == null ? rollupProperty_ : rollupPropertyBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
sourceProperties_.makeImmutable();
result.sourceProperties_ = sourceProperties_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest) {
return mergeFrom((com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest other) {
if (other
== com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest.getDefaultInstance())
return this;
if (other.hasRollupProperty()) {
mergeRollupProperty(other.getRollupProperty());
}
if (!other.sourceProperties_.isEmpty()) {
if (sourceProperties_.isEmpty()) {
sourceProperties_ = other.sourceProperties_;
bitField0_ |= 0x00000002;
} else {
ensureSourcePropertiesIsMutable();
sourceProperties_.addAll(other.sourceProperties_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getRollupPropertyFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureSourcePropertiesIsMutable();
sourceProperties_.add(s);
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.analytics.admin.v1alpha.Property rollupProperty_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.Property,
com.google.analytics.admin.v1alpha.Property.Builder,
com.google.analytics.admin.v1alpha.PropertyOrBuilder>
rollupPropertyBuilder_;
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the rollupProperty field is set.
*/
public boolean hasRollupProperty() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The rollupProperty.
*/
public com.google.analytics.admin.v1alpha.Property getRollupProperty() {
if (rollupPropertyBuilder_ == null) {
return rollupProperty_ == null
? com.google.analytics.admin.v1alpha.Property.getDefaultInstance()
: rollupProperty_;
} else {
return rollupPropertyBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRollupProperty(com.google.analytics.admin.v1alpha.Property value) {
if (rollupPropertyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
rollupProperty_ = value;
} else {
rollupPropertyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRollupProperty(
com.google.analytics.admin.v1alpha.Property.Builder builderForValue) {
if (rollupPropertyBuilder_ == null) {
rollupProperty_ = builderForValue.build();
} else {
rollupPropertyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeRollupProperty(com.google.analytics.admin.v1alpha.Property value) {
if (rollupPropertyBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& rollupProperty_ != null
&& rollupProperty_
!= com.google.analytics.admin.v1alpha.Property.getDefaultInstance()) {
getRollupPropertyBuilder().mergeFrom(value);
} else {
rollupProperty_ = value;
}
} else {
rollupPropertyBuilder_.mergeFrom(value);
}
if (rollupProperty_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRollupProperty() {
bitField0_ = (bitField0_ & ~0x00000001);
rollupProperty_ = null;
if (rollupPropertyBuilder_ != null) {
rollupPropertyBuilder_.dispose();
rollupPropertyBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.Property.Builder getRollupPropertyBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRollupPropertyFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.PropertyOrBuilder getRollupPropertyOrBuilder() {
if (rollupPropertyBuilder_ != null) {
return rollupPropertyBuilder_.getMessageOrBuilder();
} else {
return rollupProperty_ == null
? com.google.analytics.admin.v1alpha.Property.getDefaultInstance()
: rollupProperty_;
}
}
/**
*
*
* <pre>
* Required. The roll-up property to create.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.Property rollup_property = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.Property,
com.google.analytics.admin.v1alpha.Property.Builder,
com.google.analytics.admin.v1alpha.PropertyOrBuilder>
getRollupPropertyFieldBuilder() {
if (rollupPropertyBuilder_ == null) {
rollupPropertyBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.Property,
com.google.analytics.admin.v1alpha.Property.Builder,
com.google.analytics.admin.v1alpha.PropertyOrBuilder>(
getRollupProperty(), getParentForChildren(), isClean());
rollupProperty_ = null;
}
return rollupPropertyBuilder_;
}
private com.google.protobuf.LazyStringArrayList sourceProperties_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureSourcePropertiesIsMutable() {
if (!sourceProperties_.isModifiable()) {
sourceProperties_ = new com.google.protobuf.LazyStringArrayList(sourceProperties_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return A list containing the sourceProperties.
*/
public com.google.protobuf.ProtocolStringList getSourcePropertiesList() {
sourceProperties_.makeImmutable();
return sourceProperties_;
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The count of sourceProperties.
*/
public int getSourcePropertiesCount() {
return sourceProperties_.size();
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param index The index of the element to return.
* @return The sourceProperties at the given index.
*/
public java.lang.String getSourceProperties(int index) {
return sourceProperties_.get(index);
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param index The index of the value to return.
* @return The bytes of the sourceProperties at the given index.
*/
public com.google.protobuf.ByteString getSourcePropertiesBytes(int index) {
return sourceProperties_.getByteString(index);
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param index The index to set the value at.
* @param value The sourceProperties to set.
* @return This builder for chaining.
*/
public Builder setSourceProperties(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureSourcePropertiesIsMutable();
sourceProperties_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The sourceProperties to add.
* @return This builder for chaining.
*/
public Builder addSourceProperties(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureSourcePropertiesIsMutable();
sourceProperties_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param values The sourceProperties to add.
* @return This builder for chaining.
*/
public Builder addAllSourceProperties(java.lang.Iterable<java.lang.String> values) {
ensureSourcePropertiesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sourceProperties_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearSourceProperties() {
sourceProperties_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The resource names of properties that will be sources to the
* created roll-up property.
* </pre>
*
* <code>repeated string source_properties = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes of the sourceProperties to add.
* @return This builder for chaining.
*/
public Builder addSourcePropertiesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureSourcePropertiesIsMutable();
sourceProperties_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.CreateRollupPropertyRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.CreateRollupPropertyRequest)
private static final com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest();
}
public static com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Singleton wire-format parser. On any failure it attaches the partially-parsed message to the
  // thrown InvalidProtocolBufferException so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<CreateRollupPropertyRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateRollupPropertyRequest>() {
        @java.lang.Override
        public CreateRollupPropertyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Normalize missing-required-field failures to the same exception type.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parser contract exposes a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<CreateRollupPropertyRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateRollupPropertyRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.CreateRollupPropertyRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== NOTE(review): extraction artifact — the dataset join markers that appeared here broke Java
// syntax. Everything below is a *separate* generated source file from googleapis/google-cloud-java:
// java-video-intelligence/proto-google-cloud-video-intelligence-v1/src/main/java/com/google/cloud/
// videointelligence/v1/AnnotateVideoProgress.java — it should live in its own file. ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1;
/**
*
*
* <pre>
* Video annotation progress. Included in the `metadata`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1.AnnotateVideoProgress}
*/
public final class AnnotateVideoProgress extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.AnnotateVideoProgress)
AnnotateVideoProgressOrBuilder {
private static final long serialVersionUID = 0L;
// Use AnnotateVideoProgress.newBuilder() to construct.
private AnnotateVideoProgress(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AnnotateVideoProgress() {
annotationProgress_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AnnotateVideoProgress();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1_AnnotateVideoProgress_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1_AnnotateVideoProgress_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1.AnnotateVideoProgress.class,
com.google.cloud.videointelligence.v1.AnnotateVideoProgress.Builder.class);
}
public static final int ANNOTATION_PROGRESS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationProgress>
annotationProgress_;
/**
*
*
* <pre>
* Progress metadata for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationProgress>
getAnnotationProgressList() {
return annotationProgress_;
}
/**
*
*
* <pre>
* Progress metadata for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
* </code>
*/
@java.lang.Override
public java.util.List<
? extends com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder>
getAnnotationProgressOrBuilderList() {
return annotationProgress_;
}
/**
*
*
* <pre>
* Progress metadata for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
* </code>
*/
@java.lang.Override
public int getAnnotationProgressCount() {
return annotationProgress_.size();
}
/**
*
*
* <pre>
* Progress metadata for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1.VideoAnnotationProgress getAnnotationProgress(
int index) {
return annotationProgress_.get(index);
}
/**
*
*
* <pre>
* Progress metadata for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder
getAnnotationProgressOrBuilder(int index) {
return annotationProgress_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < annotationProgress_.size(); i++) {
output.writeMessage(1, annotationProgress_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < annotationProgress_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotationProgress_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.videointelligence.v1.AnnotateVideoProgress)) {
return super.equals(obj);
}
com.google.cloud.videointelligence.v1.AnnotateVideoProgress other =
(com.google.cloud.videointelligence.v1.AnnotateVideoProgress) obj;
if (!getAnnotationProgressList().equals(other.getAnnotationProgressList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAnnotationProgressCount() > 0) {
hash = (37 * hash) + ANNOTATION_PROGRESS_FIELD_NUMBER;
hash = (53 * hash) + getAnnotationProgressList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Generated by protoc: the standard family of static parse entry points.
// The byte-oriented overloads (ByteBuffer/ByteString/byte[]) delegate directly
// to PARSER; the stream-oriented overloads go through GeneratedMessageV3
// helpers that translate protobuf parse failures into checked exceptions.
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload,
// allowing multiple messages to be written back-to-back on one stream.
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Generated by protoc: builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.cloud.videointelligence.v1.AnnotateVideoProgress prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // For the default instance a fresh empty Builder avoids a redundant merge.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Video annotation progress. Included in the `metadata`
 * field of the `Operation` returned by the `GetOperation`
 * call of the `google::longrunning::Operations` service.
 * </pre>
 *
 * Protobuf type {@code google.cloud.videointelligence.v1.AnnotateVideoProgress}
 */
// Generated by protoc. Mutable builder for AnnotateVideoProgress. The repeated
// annotation_progress field has two storage modes: a plain List (before any
// nested builder is requested) or a RepeatedFieldBuilderV3 (after); every
// accessor below branches on annotationProgressBuilder_ == null accordingly.
// Bit 0 of bitField0_ tracks whether the plain List is a private mutable copy.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.AnnotateVideoProgress)
    com.google.cloud.videointelligence.v1.AnnotateVideoProgressOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1_AnnotateVideoProgress_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1_AnnotateVideoProgress_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1.AnnotateVideoProgress.class,
            com.google.cloud.videointelligence.v1.AnnotateVideoProgress.Builder.class);
  }
  // Construct using com.google.cloud.videointelligence.v1.AnnotateVideoProgress.newBuilder()
  private Builder() {}
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }
  @java.lang.Override
  public Builder clear() {
    // Reset the builder to the default (empty) state in both storage modes.
    super.clear();
    bitField0_ = 0;
    if (annotationProgressBuilder_ == null) {
      annotationProgress_ = java.util.Collections.emptyList();
    } else {
      annotationProgress_ = null;
      annotationProgressBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1_AnnotateVideoProgress_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1.AnnotateVideoProgress getDefaultInstanceForType() {
    return com.google.cloud.videointelligence.v1.AnnotateVideoProgress.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1.AnnotateVideoProgress build() {
    com.google.cloud.videointelligence.v1.AnnotateVideoProgress result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1.AnnotateVideoProgress buildPartial() {
    com.google.cloud.videointelligence.v1.AnnotateVideoProgress result =
        new com.google.cloud.videointelligence.v1.AnnotateVideoProgress(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  // Transfers the repeated field into the result. In list mode the list is
  // frozen (made unmodifiable) and ownership moves to the message, so the
  // mutable-copy bit is cleared; in builder mode the field builder produces
  // the immutable message list.
  private void buildPartialRepeatedFields(
      com.google.cloud.videointelligence.v1.AnnotateVideoProgress result) {
    if (annotationProgressBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        annotationProgress_ = java.util.Collections.unmodifiableList(annotationProgress_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.annotationProgress_ = annotationProgress_;
    } else {
      result.annotationProgress_ = annotationProgressBuilder_.build();
    }
  }
  // No singular fields in this message; body is an empty generated stub.
  private void buildPartial0(com.google.cloud.videointelligence.v1.AnnotateVideoProgress result) {
    int from_bitField0_ = bitField0_;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.videointelligence.v1.AnnotateVideoProgress) {
      return mergeFrom((com.google.cloud.videointelligence.v1.AnnotateVideoProgress) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Merges another message's fields into this builder following proto3 merge
  // semantics: repeated elements are appended. When the current list is empty
  // the other message's (immutable) list is adopted by reference and the
  // mutable-copy bit is cleared, deferring a copy until the next mutation.
  public Builder mergeFrom(com.google.cloud.videointelligence.v1.AnnotateVideoProgress other) {
    if (other == com.google.cloud.videointelligence.v1.AnnotateVideoProgress.getDefaultInstance())
      return this;
    if (annotationProgressBuilder_ == null) {
      if (!other.annotationProgress_.isEmpty()) {
        if (annotationProgress_.isEmpty()) {
          annotationProgress_ = other.annotationProgress_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureAnnotationProgressIsMutable();
          annotationProgress_.addAll(other.annotationProgress_);
        }
        onChanged();
      }
    } else {
      if (!other.annotationProgress_.isEmpty()) {
        if (annotationProgressBuilder_.isEmpty()) {
          // Empty field builder: dispose it and adopt the other list, then
          // recreate the builder only if the runtime always uses field builders.
          annotationProgressBuilder_.dispose();
          annotationProgressBuilder_ = null;
          annotationProgress_ = other.annotationProgress_;
          bitField0_ = (bitField0_ & ~0x00000001);
          annotationProgressBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getAnnotationProgressFieldBuilder()
                  : null;
        } else {
          annotationProgressBuilder_.addAllMessages(other.annotationProgress_);
        }
      }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields, so any state is initialized.
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    // Streaming wire-format parse loop: tag 10 = field 1 (length-delimited
    // VideoAnnotationProgress); tag 0 = end of input; anything else is kept
    // as an unknown field. onChanged() fires even on failure (finally).
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.videointelligence.v1.VideoAnnotationProgress m =
                  input.readMessage(
                      com.google.cloud.videointelligence.v1.VideoAnnotationProgress.parser(),
                      extensionRegistry);
              if (annotationProgressBuilder_ == null) {
                ensureAnnotationProgressIsMutable();
                annotationProgress_.add(m);
              } else {
                annotationProgressBuilder_.addMessage(m);
              }
              break;
            } // case 10
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  // Bit 0: annotationProgress_ is a private mutable copy owned by this builder.
  private int bitField0_;
  private java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationProgress>
      annotationProgress_ = java.util.Collections.emptyList();
  // Copy-on-write: before the first mutation in list mode, replace any shared
  // or immutable list with a private ArrayList and set bit 0.
  private void ensureAnnotationProgressIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      annotationProgress_ =
          new java.util.ArrayList<com.google.cloud.videointelligence.v1.VideoAnnotationProgress>(
              annotationProgress_);
      bitField0_ |= 0x00000001;
    }
  }
  // Lazily-created field builder; once non-null it is the sole storage for the
  // repeated field and annotationProgress_ is set to null.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.videointelligence.v1.VideoAnnotationProgress,
          com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder,
          com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder>
      annotationProgressBuilder_;
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationProgress>
      getAnnotationProgressList() {
    if (annotationProgressBuilder_ == null) {
      return java.util.Collections.unmodifiableList(annotationProgress_);
    } else {
      return annotationProgressBuilder_.getMessageList();
    }
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public int getAnnotationProgressCount() {
    if (annotationProgressBuilder_ == null) {
      return annotationProgress_.size();
    } else {
      return annotationProgressBuilder_.getCount();
    }
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public com.google.cloud.videointelligence.v1.VideoAnnotationProgress getAnnotationProgress(
      int index) {
    if (annotationProgressBuilder_ == null) {
      return annotationProgress_.get(index);
    } else {
      return annotationProgressBuilder_.getMessage(index);
    }
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder setAnnotationProgress(
      int index, com.google.cloud.videointelligence.v1.VideoAnnotationProgress value) {
    if (annotationProgressBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureAnnotationProgressIsMutable();
      annotationProgress_.set(index, value);
      onChanged();
    } else {
      annotationProgressBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder setAnnotationProgress(
      int index,
      com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder builderForValue) {
    if (annotationProgressBuilder_ == null) {
      ensureAnnotationProgressIsMutable();
      annotationProgress_.set(index, builderForValue.build());
      onChanged();
    } else {
      annotationProgressBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder addAnnotationProgress(
      com.google.cloud.videointelligence.v1.VideoAnnotationProgress value) {
    if (annotationProgressBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureAnnotationProgressIsMutable();
      annotationProgress_.add(value);
      onChanged();
    } else {
      annotationProgressBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder addAnnotationProgress(
      int index, com.google.cloud.videointelligence.v1.VideoAnnotationProgress value) {
    if (annotationProgressBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureAnnotationProgressIsMutable();
      annotationProgress_.add(index, value);
      onChanged();
    } else {
      annotationProgressBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder addAnnotationProgress(
      com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder builderForValue) {
    if (annotationProgressBuilder_ == null) {
      ensureAnnotationProgressIsMutable();
      annotationProgress_.add(builderForValue.build());
      onChanged();
    } else {
      annotationProgressBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder addAnnotationProgress(
      int index,
      com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder builderForValue) {
    if (annotationProgressBuilder_ == null) {
      ensureAnnotationProgressIsMutable();
      annotationProgress_.add(index, builderForValue.build());
      onChanged();
    } else {
      annotationProgressBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder addAllAnnotationProgress(
      java.lang.Iterable<? extends com.google.cloud.videointelligence.v1.VideoAnnotationProgress>
          values) {
    if (annotationProgressBuilder_ == null) {
      ensureAnnotationProgressIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationProgress_);
      onChanged();
    } else {
      annotationProgressBuilder_.addAllMessages(values);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder clearAnnotationProgress() {
    if (annotationProgressBuilder_ == null) {
      annotationProgress_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      annotationProgressBuilder_.clear();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public Builder removeAnnotationProgress(int index) {
    if (annotationProgressBuilder_ == null) {
      ensureAnnotationProgressIsMutable();
      annotationProgress_.remove(index);
      onChanged();
    } else {
      annotationProgressBuilder_.remove(index);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder
      getAnnotationProgressBuilder(int index) {
    return getAnnotationProgressFieldBuilder().getBuilder(index);
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder
      getAnnotationProgressOrBuilder(int index) {
    if (annotationProgressBuilder_ == null) {
      return annotationProgress_.get(index);
    } else {
      return annotationProgressBuilder_.getMessageOrBuilder(index);
    }
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public java.util.List<
          ? extends com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder>
      getAnnotationProgressOrBuilderList() {
    if (annotationProgressBuilder_ != null) {
      return annotationProgressBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(annotationProgress_);
    }
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder
      addAnnotationProgressBuilder() {
    return getAnnotationProgressFieldBuilder()
        .addBuilder(
            com.google.cloud.videointelligence.v1.VideoAnnotationProgress.getDefaultInstance());
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder
      addAnnotationProgressBuilder(int index) {
    return getAnnotationProgressFieldBuilder()
        .addBuilder(
            index,
            com.google.cloud.videointelligence.v1.VideoAnnotationProgress.getDefaultInstance());
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  public java.util.List<com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder>
      getAnnotationProgressBuilderList() {
    return getAnnotationProgressFieldBuilder().getBuilderList();
  }
  // Switches storage from list mode to builder mode on first use; the current
  // list contents seed the field builder and the list reference is dropped.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.videointelligence.v1.VideoAnnotationProgress,
          com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder,
          com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder>
      getAnnotationProgressFieldBuilder() {
    if (annotationProgressBuilder_ == null) {
      annotationProgressBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.videointelligence.v1.VideoAnnotationProgress,
              com.google.cloud.videointelligence.v1.VideoAnnotationProgress.Builder,
              com.google.cloud.videointelligence.v1.VideoAnnotationProgressOrBuilder>(
              annotationProgress_,
              ((bitField0_ & 0x00000001) != 0),
              getParentForChildren(),
              isClean());
      annotationProgress_ = null;
    }
    return annotationProgressBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.AnnotateVideoProgress)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.AnnotateVideoProgress)
// Singleton empty instance shared by all callers; created eagerly at class load.
private static final com.google.cloud.videointelligence.v1.AnnotateVideoProgress DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.AnnotateVideoProgress();
}
public static com.google.cloud.videointelligence.v1.AnnotateVideoProgress getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Generated parser: drives Builder.mergeFrom over the wire format and, on any
// failure, attaches the partially-built message to the thrown
// InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<AnnotateVideoProgress> PARSER =
    new com.google.protobuf.AbstractParser<AnnotateVideoProgress>() {
      @java.lang.Override
      public AnnotateVideoProgress parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<AnnotateVideoProgress> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<AnnotateVideoProgress> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1.AnnotateVideoProgress getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/commons-statistics | 35,315 | commons-statistics-descriptive/src/main/java/org/apache/commons/statistics/descriptive/Quantile.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.statistics.descriptive;
import java.util.Arrays;
import java.util.Objects;
import java.util.function.IntToDoubleFunction;
import org.apache.commons.numbers.arrays.Selection;
/**
* Provides quantile computation.
*
* <p>For values of length {@code n}:
* <ul>
* <li>The result is {@code NaN} if {@code n = 0}.
* <li>The result is {@code values[0]} if {@code n = 1}.
* <li>Otherwise the result is computed using the {@link EstimationMethod}.
* </ul>
*
* <p>Computation of multiple quantiles and will handle duplicate and unordered
* probabilities. Passing ordered probabilities is recommended if the order is already
* known as this can improve efficiency; for example using uniform spacing through the
* array data, or to identify extreme values from the data such as {@code [0.001, 0.999]}.
*
* <p>This implementation respects the ordering imposed by
* {@link Double#compare(double, double)} for {@code NaN} values. If a {@code NaN} occurs
* in the selected positions in the fully sorted values then the result is {@code NaN}.
*
* <p>The {@link NaNPolicy} can be used to change the behaviour on {@code NaN} values.
*
* <p>Instances of this class are immutable and thread-safe.
*
* @see #with(NaNPolicy)
* @see <a href="http://en.wikipedia.org/wiki/Quantile">Quantile (Wikipedia)</a>
* @since 1.1
*/
public final class Quantile {
    /** Message when the probability is not in the range {@code [0, 1]}. */
    private static final String INVALID_PROBABILITY = "Invalid probability: ";
    /** Message when no probabilities are provided for the varargs method. */
    private static final String NO_PROBABILITIES_SPECIFIED = "No probabilities specified";
    /** Message when the size is not valid. */
    private static final String INVALID_SIZE = "Invalid size: ";
    /** Message when the number of probabilities in a range is not valid. */
    private static final String INVALID_NUMBER_OF_PROBABILITIES = "Invalid number of probabilities: ";
    /** Default instance. Method 8 is recommended by Hyndman and Fan. */
    private static final Quantile DEFAULT = new Quantile(false, NaNPolicy.INCLUDE, EstimationMethod.HF8);
    /** Flag to indicate if the data should be copied. */
    private final boolean copy;
    /** NaN policy for floating point data. */
    private final NaNPolicy nanPolicy;
    /** Transformer for NaN data. Derived from {@code nanPolicy} and {@code copy} in the constructor. */
    private final NaNTransformer nanTransformer;
    /** Estimation type used to determine the value from the quantile. */
    private final EstimationMethod estimationType;
    /**
     * Private constructor; instances are created via the static factory and the
     * {@code with*} methods, which return new immutable configurations.
     *
     * @param copy Flag to indicate if the data should be copied.
     * @param nanPolicy NaN policy.
     * @param estimationType Estimation type used to determine the value from the quantile.
     */
    private Quantile(boolean copy, NaNPolicy nanPolicy, EstimationMethod estimationType) {
        this.copy = copy;
        this.nanPolicy = nanPolicy;
        this.estimationType = estimationType;
        // Pre-compute the transformer so each evaluate call avoids the policy dispatch.
        nanTransformer = NaNTransformers.createNaNTransformer(nanPolicy, copy);
    }
    /**
     * Return a new instance with the default options.
     *
     * <ul>
     * <li>{@linkplain #withCopy(boolean) Copy = false}
     * <li>{@linkplain #with(NaNPolicy) NaN policy = include}
     * <li>{@linkplain #with(EstimationMethod) Estimation method = HF8}
     * </ul>
     *
     * <p>Note: The default options configure for processing in-place and including
     * {@code NaN} values in the data. This is the most efficient mode and has the
     * smallest memory consumption.
     *
     * @return the quantile implementation
     * @see #withCopy(boolean)
     * @see #with(NaNPolicy)
     * @see #with(EstimationMethod)
     */
    public static Quantile withDefaults() {
        // Instances are immutable so the shared default can be returned directly.
        return DEFAULT;
    }
    /**
     * Return an instance with the configured copy behaviour. If {@code false} then
     * the input array will be modified by the call to evaluate the quantiles; otherwise
     * the computation uses a copy of the data.
     *
     * @param v Value.
     * @return an instance
     */
    public Quantile withCopy(boolean v) {
        // Immutable configuration: return a new instance rather than mutating this one.
        return new Quantile(v, nanPolicy, estimationType);
    }
    /**
     * Return an instance with the configured {@link NaNPolicy}.
     *
     * <p>Note: This implementation respects the ordering imposed by
     * {@link Double#compare(double, double)} for {@code NaN} values: {@code NaN} is
     * considered greater than all other values, and all {@code NaN} values are equal. The
     * {@link NaNPolicy} changes the computation of the statistic in the presence of
     * {@code NaN} values.
     *
     * <ul>
     * <li>{@link NaNPolicy#INCLUDE}: {@code NaN} values are moved to the end of the data;
     * the size of the data <em>includes</em> the {@code NaN} values and the quantile will be
     * {@code NaN} if any value used for quantile interpolation is {@code NaN}.
     * <li>{@link NaNPolicy#EXCLUDE}: {@code NaN} values are moved to the end of the data;
     * the size of the data <em>excludes</em> the {@code NaN} values and the quantile will
     * never be {@code NaN} for non-zero size. If all data are {@code NaN} then the size is zero
     * and the result is {@code NaN}.
     * <li>{@link NaNPolicy#ERROR}: An exception is raised if the data contains {@code NaN}
     * values.
     * </ul>
     *
     * <p>Note that the result is identical for all policies if no {@code NaN} values are present.
     *
     * @param v Value.
     * @return an instance
     * @throws NullPointerException if the policy is null
     */
    public Quantile with(NaNPolicy v) {
        return new Quantile(copy, Objects.requireNonNull(v), estimationType);
    }
    /**
     * Return an instance with the configured {@link EstimationMethod}.
     *
     * @param v Value.
     * @return an instance
     * @throws NullPointerException if the method is null
     */
    public Quantile with(EstimationMethod v) {
        return new Quantile(copy, nanPolicy, Objects.requireNonNull(v));
    }
/**
* Generate {@code n} evenly spaced probabilities in the range {@code [0, 1]}.
*
* <pre>
* 1/(n + 1), 2/(n + 1), ..., n/(n + 1)
* </pre>
*
* @param n Number of probabilities.
* @return the probabilities
* @throws IllegalArgumentException if {@code n < 1}
*/
public static double[] probabilities(int n) {
checkNumberOfProbabilities(n);
final double c1 = n + 1.0;
final double[] p = new double[n];
for (int i = 0; i < n; i++) {
p[i] = (i + 1.0) / c1;
}
return p;
}
/**
* Generate {@code n} evenly spaced probabilities in the range {@code [p1, p2]}.
*
* <pre>
* w = p2 - p1
* p1 + w/(n + 1), p1 + 2w/(n + 1), ..., p1 + nw/(n + 1)
* </pre>
*
* @param n Number of probabilities.
* @param p1 Lower probability.
* @param p2 Upper probability.
* @return the probabilities
* @throws IllegalArgumentException if {@code n < 1}; if the probabilities are not in the
* range {@code [0, 1]}; or {@code p2 <= p1}.
*/
public static double[] probabilities(int n, double p1, double p2) {
checkProbability(p1);
checkProbability(p2);
if (p2 <= p1) {
throw new IllegalArgumentException("Invalid range: [" + p1 + ", " + p2 + "]");
}
final double[] p = probabilities(n);
for (int i = 0; i < n; i++) {
p[i] = (1 - p[i]) * p1 + p[i] * p2;
}
return p;
}
    /**
     * Evaluate the {@code p}-th quantile of the values.
     *
     * <p>Note: This method may partially sort the input values if not configured to
     * {@link #withCopy(boolean) copy} the input data.
     *
     * <p><strong>Performance</strong>
     *
     * <p>It is not recommended to use this method for repeat calls for different quantiles
     * within the same values. The {@link #evaluate(double[], double...)} method should be used
     * which provides better performance.
     *
     * @param values Values.
     * @param p Probability for the quantile to compute.
     * @return the quantile
     * @throws IllegalArgumentException if the probability {@code p} is not in the range {@code [0, 1]};
     * or if the values contain NaN and the configuration is {@link NaNPolicy#ERROR}
     * @see #evaluate(double[], double...)
     * @see #with(NaNPolicy)
     */
    public double evaluate(double[] values, double p) {
        // Delegate to the shared range implementation over the full array.
        return compute(values, 0, values.length, p);
    }
    /**
     * Evaluate the {@code p}-th quantile of the specified range of values.
     *
     * <p>Note: This method may partially sort the input values if not configured to
     * {@link #withCopy(boolean) copy} the input data.
     *
     * <p><strong>Performance</strong>
     *
     * <p>It is not recommended to use this method for repeat calls for different quantiles
     * within the same values. The {@link #evaluateRange(double[], int, int, double...)} method should be used
     * which provides better performance.
     *
     * @param values Values.
     * @param from Inclusive start of the range.
     * @param to Exclusive end of the range.
     * @param p Probability for the quantile to compute.
     * @return the quantile
     * @throws IllegalArgumentException if the probability {@code p} is not in the range {@code [0, 1]};
     * or if the values contain NaN and the configuration is {@link NaNPolicy#ERROR}
     * @throws IndexOutOfBoundsException if the sub-range is out of bounds
     * @see #evaluateRange(double[], int, int, double...)
     * @see #with(NaNPolicy)
     * @since 1.2
     */
    public double evaluateRange(double[] values, int from, int to, double p) {
        // Validate the sub-range before delegating to the shared implementation.
        Statistics.checkFromToIndex(from, to, values.length);
        return compute(values, from, to, p);
    }
    /**
     * Compute the {@code p}-th quantile of the specified range of values.
     *
     * @param values Values.
     * @param from Inclusive start of the range.
     * @param to Exclusive end of the range.
     * @param p Probability for the quantile to compute.
     * @return the quantile
     * @throws IllegalArgumentException if the probability {@code p} is not in the range {@code [0, 1]}
     */
    private double compute(double[] values, int from, int to, double p) {
        checkProbability(p);
        // Floating-point data handling: apply the configured NaN handling; the
        // effective data range within the returned array is reported in bounds.
        final int[] bounds = new int[2];
        final double[] x = nanTransformer.apply(values, from, to, bounds);
        final int start = bounds[0];
        final int end = bounds[1];
        final int n = end - start;
        // Special cases: no data yields NaN; a single value is every quantile
        if (n <= 1) {
            return n == 0 ? Double.NaN : x[start];
        }
        // Real-valued 0-based position within the range [0, n)
        final double pos = estimationType.index(p, n);
        // Integer part and the corresponding absolute array index
        final int ip = (int) pos;
        final int i = start + ip;
        // Partition and compute. Only the required order statistic(s) are selected;
        // a fractional position also needs the next value for interpolation.
        if (pos > ip) {
            Selection.select(x, start, end, new int[] {i, i + 1});
            return Interpolation.interpolate(x[i], x[i + 1], pos - ip);
        }
        Selection.select(x, start, end, i);
        return x[i];
    }
/**
* Evaluate the {@code p}-th quantiles of the values.
*
* <p>Note: This method may partially sort the input values if not configured to
* {@link #withCopy(boolean) copy} the input data.
*
* @param values Values.
* @param p Probabilities for the quantiles to compute.
* @return the quantiles
* @throws IllegalArgumentException if any probability {@code p} is not in the range {@code [0, 1]};
* no probabilities are specified; or if the values contain NaN and the configuration is {@link NaNPolicy#ERROR}
* @see #with(NaNPolicy)
*/
public double[] evaluate(double[] values, double... p) {
return compute(values, 0, values.length, p);
}
/**
* Evaluate the {@code p}-th quantiles of the specified range of values.
*
* <p>Note: This method may partially sort the input values if not configured to
* {@link #withCopy(boolean) copy} the input data.
*
* @param values Values.
* @param from Inclusive start of the range.
* @param to Exclusive end of the range.
* @param p Probabilities for the quantiles to compute.
* @return the quantiles
* @throws IllegalArgumentException if any probability {@code p} is not in the range {@code [0, 1]};
* no probabilities are specified; or if the values contain NaN and the configuration is {@link NaNPolicy#ERROR}
* @throws IndexOutOfBoundsException if the sub-range is out of bounds
* @see #with(NaNPolicy)
* @since 1.2
*/
public double[] evaluateRange(double[] values, int from, int to, double... p) {
Statistics.checkFromToIndex(from, to, values.length);
return compute(values, from, to, p);
}
    /**
     * Compute the {@code p}-th quantiles of the specified range of values.
     *
     * @param values Values.
     * @param from Inclusive start of the range.
     * @param to Exclusive end of the range.
     * @param p Probabilities for the quantiles to compute.
     * @return the quantiles
     * @throws IllegalArgumentException if any probability {@code p} is not in the range {@code [0, 1]};
     * or no probabilities are specified.
     */
    private double[] compute(double[] values, int from, int to, double... p) {
        checkProbabilities(p);
        // Floating-point data handling: apply the configured NaN handling; the
        // effective data range within the returned array is reported in bounds.
        final int[] bounds = new int[2];
        final double[] x = nanTransformer.apply(values, from, to, bounds);
        final int start = bounds[0];
        final int end = bounds[1];
        final int n = end - start;
        // Special cases: no data yields NaN; a single value is every quantile
        final double[] q = new double[p.length];
        if (n <= 1) {
            Arrays.fill(q, n == 0 ? Double.NaN : x[start]);
            return q;
        }
        // Collect interpolation positions. We use the output q as storage.
        final int[] indices = computeIndices(n, p, q, start);
        // Partition once for all required order statistics
        Selection.select(x, start, end, indices);
        // Compute: replace each stored real-valued position in q with its quantile
        for (int k = 0; k < p.length; k++) {
            // ip in [0, n); i in [start, end)
            final int ip = (int) q[k];
            final int i = start + ip;
            if (q[k] > ip) {
                // Fractional position: interpolate between adjacent order statistics
                q[k] = Interpolation.interpolate(x[i], x[i + 1], q[k] - ip);
            } else {
                q[k] = x[i];
            }
        }
        return q;
    }
/**
* Evaluate the {@code p}-th quantile of the values.
*
* <p>Note: This method may partially sort the input values if not configured to
* {@link #withCopy(boolean) copy} the input data.
*
* <p><strong>Performance</strong>
*
* <p>It is not recommended to use this method for repeat calls for different quantiles
* within the same values. The {@link #evaluate(int[], double...)} method should be used
* which provides better performance.
*
* @param values Values.
* @param p Probability for the quantile to compute.
* @return the quantile
* @throws IllegalArgumentException if the probability {@code p} is not in the range {@code [0, 1]}
* @see #evaluate(int[], double...)
*/
public double evaluate(int[] values, double p) {
return compute(values, 0, values.length, p);
}
/**
* Evaluate the {@code p}-th quantile of the specified range of values.
*
* <p>Note: This method may partially sort the input values if not configured to
* {@link #withCopy(boolean) copy} the input data.
*
* <p><strong>Performance</strong>
*
* <p>It is not recommended to use this method for repeat calls for different quantiles
* within the same values. The {@link #evaluateRange(int[], int, int, double...)} method should be used
* which provides better performance.
*
* @param values Values.
* @param from Inclusive start of the range.
* @param to Exclusive end of the range.
* @param p Probability for the quantile to compute.
* @return the quantile
* @throws IllegalArgumentException if the probability {@code p} is not in the range {@code [0, 1]}
* @throws IndexOutOfBoundsException if the sub-range is out of bounds
* @see #evaluateRange(int[], int, int, double...)
* @since 1.2
*/
public double evaluateRange(int[] values, int from, int to, double p) {
Statistics.checkFromToIndex(from, to, values.length);
return compute(values, from, to, p);
}
    /**
     * Compute the {@code p}-th quantile of the specified range of values.
     *
     * @param values Values.
     * @param from Inclusive start of the range.
     * @param to Exclusive end of the range.
     * @param p Probability for the quantile to compute.
     * @return the quantile
     * @throws IllegalArgumentException if the probability {@code p} is not in the range {@code [0, 1]}
     */
    private double compute(int[] values, int from, int to, double p) {
        checkProbability(p);
        final int n = to - from;
        // Special cases: no data yields NaN; a single value is every quantile
        if (n <= 1) {
            return n == 0 ? Double.NaN : values[from];
        }
        // Create the range: a defensive copy when configured, otherwise operate
        // in-place on the caller's array (which will be partially sorted)
        final int[] x;
        final int start;
        final int end;
        if (copy) {
            x = Statistics.copy(values, from, to);
            start = 0;
            end = n;
        } else {
            x = values;
            start = from;
            end = to;
        }
        // Real-valued 0-based position within the range [0, n)
        final double pos = estimationType.index(p, n);
        // Integer part and the corresponding absolute array index
        final int ip = (int) pos;
        final int i = start + ip;
        // Partition and compute. A fractional position also needs the next value
        // for interpolation.
        if (pos > ip) {
            Selection.select(x, start, end, new int[] {i, i + 1});
            return Interpolation.interpolate(x[i], x[i + 1], pos - ip);
        }
        Selection.select(x, start, end, i);
        return x[i];
    }
/**
* Evaluate the {@code p}-th quantiles of the values.
*
* <p>Note: This method may partially sort the input values if not configured to
* {@link #withCopy(boolean) copy} the input data.
*
* @param values Values.
* @param p Probabilities for the quantiles to compute.
* @return the quantiles
* @throws IllegalArgumentException if any probability {@code p} is not in the range {@code [0, 1]};
* or no probabilities are specified.
*/
public double[] evaluate(int[] values, double... p) {
return compute(values, 0, values.length, p);
}
/**
* Evaluate the {@code p}-th quantiles of the specified range of values..
*
* <p>Note: This method may partially sort the input values if not configured to
* {@link #withCopy(boolean) copy} the input data.
*
* @param values Values.
* @param from Inclusive start of the range.
* @param to Exclusive end of the range.
* @param p Probabilities for the quantiles to compute.
* @return the quantiles
* @throws IllegalArgumentException if any probability {@code p} is not in the range {@code [0, 1]};
* or no probabilities are specified.
* @throws IndexOutOfBoundsException if the sub-range is out of bounds
* @since 1.2
*/
public double[] evaluateRange(int[] values, int from, int to, double... p) {
Statistics.checkFromToIndex(from, to, values.length);
return compute(values, from, to, p);
}
    /**
     * Compute the {@code p}-th quantiles of the specified range of values.
     *
     * @param values Values.
     * @param from Inclusive start of the range.
     * @param to Exclusive end of the range.
     * @param p Probabilities for the quantiles to compute.
     * @return the quantiles
     * @throws IllegalArgumentException if any probability {@code p} is not in the range {@code [0, 1]};
     * or no probabilities are specified.
     */
    private double[] compute(int[] values, int from, int to, double... p) {
        checkProbabilities(p);
        final int n = to - from;
        // Special cases: no data yields NaN; a single value is every quantile
        final double[] q = new double[p.length];
        if (n <= 1) {
            Arrays.fill(q, n == 0 ? Double.NaN : values[from]);
            return q;
        }
        // Create the range: a defensive copy when configured, otherwise operate
        // in-place on the caller's array (which will be partially sorted)
        final int[] x;
        final int start;
        final int end;
        if (copy) {
            x = Statistics.copy(values, from, to);
            start = 0;
            end = n;
        } else {
            x = values;
            start = from;
            end = to;
        }
        // Collect interpolation positions. We use the output q as storage.
        final int[] indices = computeIndices(n, p, q, start);
        // Partition once for all required order statistics
        Selection.select(x, start, end, indices);
        // Compute: replace each stored real-valued position in q with its quantile
        for (int k = 0; k < p.length; k++) {
            // ip in [0, n); i in [start, end)
            final int ip = (int) q[k];
            final int i = start + ip;
            if (q[k] > ip) {
                // Fractional position: interpolate between adjacent order statistics
                q[k] = Interpolation.interpolate(x[i], x[i + 1], q[k] - ip);
            } else {
                q[k] = x[i];
            }
        }
        return q;
    }
/**
* Evaluate the {@code p}-th quantile of the values.
*
* <p>This method can be used when the values of known size are already sorted.
*
* <pre>{@code
* short[] x = ...
* Arrays.sort(x);
* double q = Quantile.withDefaults().evaluate(x.length, i -> x[i], 0.05);
* }</pre>
*
* @param n Size of the values.
* @param values Values function.
* @param p Probability for the quantile to compute.
* @return the quantile
* @throws IllegalArgumentException if {@code size < 0}; or if the probability {@code p} is
* not in the range {@code [0, 1]}.
*/
public double evaluate(int n, IntToDoubleFunction values, double p) {
checkSize(n);
checkProbability(p);
// Special case
if (n <= 1) {
return n == 0 ? Double.NaN : values.applyAsDouble(0);
}
final double pos = estimationType.index(p, n);
final int i = (int) pos;
final double v1 = values.applyAsDouble(i);
if (pos > i) {
final double v2 = values.applyAsDouble(i + 1);
return Interpolation.interpolate(v1, v2, pos - i);
}
return v1;
}
/**
* Evaluate the {@code p}-th quantiles of the values.
*
* <p>This method can be used when the values of known size are already sorted.
*
* <pre>{@code
* short[] x = ...
* Arrays.sort(x);
* double[] q = Quantile.withDefaults().evaluate(x.length, i -> x[i], 0.25, 0.5, 0.75);
* }</pre>
*
* @param n Size of the values.
* @param values Values function.
* @param p Probabilities for the quantiles to compute.
* @return the quantiles
* @throws IllegalArgumentException if {@code size < 0}; if any probability {@code p} is
* not in the range {@code [0, 1]}; or no probabilities are specified.
*/
public double[] evaluate(int n, IntToDoubleFunction values, double... p) {
checkSize(n);
checkProbabilities(p);
// Special case
final double[] q = new double[p.length];
if (n <= 1) {
Arrays.fill(q, n == 0 ? Double.NaN : values.applyAsDouble(0));
return q;
}
for (int k = 0; k < p.length; k++) {
final double pos = estimationType.index(p[k], n);
final int i = (int) pos;
final double v1 = values.applyAsDouble(i);
if (pos > i) {
final double v2 = values.applyAsDouble(i + 1);
q[k] = Interpolation.interpolate(v1, v2, pos - i);
} else {
q[k] = v1;
}
}
return q;
}
/**
* Check the probability {@code p} is in the range {@code [0, 1]}.
*
* @param p Probability for the quantile to compute.
* @throws IllegalArgumentException if the probability is not in the range {@code [0, 1]}
*/
private static void checkProbability(double p) {
// Logic negation will detect NaN
if (!(p >= 0 && p <= 1)) {
throw new IllegalArgumentException(INVALID_PROBABILITY + p);
}
}
/**
* Check the probabilities {@code p} are in the range {@code [0, 1]}.
*
* @param p Probabilities for the quantiles to compute.
* @throws IllegalArgumentException if any probabilities {@code p} is not in the range {@code [0, 1]};
* or no probabilities are specified.
*/
private static void checkProbabilities(double... p) {
if (p.length == 0) {
throw new IllegalArgumentException(NO_PROBABILITIES_SPECIFIED);
}
for (final double pp : p) {
checkProbability(pp);
}
}
/**
* Check the {@code size} is positive.
*
* @param n Size of the values.
* @throws IllegalArgumentException if {@code size < 0}
*/
private static void checkSize(int n) {
if (n < 0) {
throw new IllegalArgumentException(INVALID_SIZE + n);
}
}
/**
* Check the number of probabilities {@code n} is strictly positive.
*
* @param n Number of probabilities.
* @throws IllegalArgumentException if {@code c < 1}
*/
private static void checkNumberOfProbabilities(int n) {
if (n < 1) {
throw new IllegalArgumentException(INVALID_NUMBER_OF_PROBABILITIES + n);
}
}
    /**
     * Compute the indices required for quantile interpolation.
     *
     * <p>The zero-based interpolation index in {@code [0, n)} is
     * saved into the working array {@code q} for each {@code p};
     * the caller later converts these positions to quantile values.
     *
     * <p>The indices are incremented by the provided {@code offset} to allow
     * addressing sub-ranges of a larger array.
     *
     * @param n Size of the data.
     * @param p Probabilities for the quantiles to compute.
     * @param q Working array for quantiles in {@code [0, n)}.
     * @param offset Array offset.
     * @return the indices in {@code [offset, offset + n)}
     */
    private int[] computeIndices(int n, double[] p, double[] q, int offset) {
        // At most two indices (i and i + 1) are needed per probability
        final int[] indices = new int[p.length << 1];
        int count = 0;
        for (int k = 0; k < p.length; k++) {
            final double pos = estimationType.index(p[k], n);
            // Store the real-valued position for the caller's interpolation pass
            q[k] = pos;
            final int i = (int) pos;
            indices[count++] = offset + i;
            if (pos > i) {
                // Require the next index for interpolation
                indices[count++] = offset + i + 1;
            }
        }
        // Trim unused slots when some positions required no interpolation index
        if (count < indices.length) {
            return Arrays.copyOf(indices, count);
        }
        return indices;
    }
/**
* Estimation methods for a quantile. Provides the nine quantile algorithms
* defined in Hyndman and Fan (1996)[1] as {@code HF1 - HF9}.
*
* <p>Samples quantiles are defined by:
*
* <p>\[ Q(p) = (1 - \gamma) x_j + \gamma x_{j+1} \]
*
* <p>where \( \frac{j-m}{n} \leq p \le \frac{j-m+1}{n} \), \( x_j \) is the \( j \)th
* order statistic, \( n \) is the sample size, the value of \( \gamma \) is a function
* of \( j = \lfloor np+m \rfloor \) and \( g = np + m - j \), and \( m \) is a constant
* determined by the sample quantile type.
*
* <p>Note that the real-valued position \( np + m \) is a 1-based index and
* \( j \in [1, n] \). If the real valued position is computed as beyond the lowest or
* highest values in the sample, this implementation will return the minimum or maximum
* observation respectively.
*
* <p>Types 1, 2, and 3 are discontinuous functions of \( p \); types 4 to 9 are continuous
* functions of \( p \).
*
* <p>For the continuous functions, the probability \( p_k \) is provided for the \( k \)-th order
* statistic in size \( n \). Samples quantiles are equivalently obtained to \( Q(p) \) by
* linear interpolation between points \( (p_k, x_k) \) and \( (p_{k+1}, x_{k+1}) \) for
* any \( p_k \leq p \leq p_{k+1} \).
*
* <ol>
* <li>Hyndman and Fan (1996)
* <i>Sample Quantiles in Statistical Packages.</i>
* The American Statistician, 50, 361-365.
* <a href="https://www.jstor.org/stable/2684934">doi.org/10.2307/2684934</a>
* <li><a href="http://en.wikipedia.org/wiki/Quantile">Quantile (Wikipedia)</a>
* </ol>
*/
public enum EstimationMethod {
/**
* Inverse of the empirical distribution function.
*
* <p>\( m = 0 \). \( \gamma = 0 \) if \( g = 0 \), and 1 otherwise.
*/
HF1 {
@Override
double position0(double p, int n) {
// position = np + 0. This is 1-based so adjust to 0-based.
return Math.ceil(n * p) - 1;
}
},
/**
* Similar to {@link #HF1} with averaging at discontinuities.
*
* <p>\( m = 0 \). \( \gamma = 0.5 \) if \( g = 0 \), and 1 otherwise.
*/
HF2 {
@Override
double position0(double p, int n) {
final double pos = n * p;
// Average at discontinuities
final int j = (int) pos;
final double g = pos - j;
if (g == 0) {
return j - 0.5;
}
// As HF1 : ceil(j + g) - 1
return j;
}
},
/**
* The observation closest to \( np \). Ties are resolved to the nearest even order statistic.
*
* <p>\( m = -1/2 \). \( \gamma = 0 \) if \( g = 0 \) and \( j \) is even, and 1 otherwise.
*/
HF3 {
@Override
double position0(double p, int n) {
// Let rint do the work for ties to even
return Math.rint(n * p) - 1;
}
},
/**
* Linear interpolation of the inverse of the empirical CDF.
*
* <p>\( m = 0 \). \( p_k = \frac{k}{n} \).
*/
HF4 {
@Override
double position0(double p, int n) {
// np + 0 - 1
return n * p - 1;
}
},
/**
* A piecewise linear function where the knots are the values midway through the steps of
* the empirical CDF. Proposed by Hazen (1914) and popular amongst hydrologists.
*
* <p>\( m = 1/2 \). \( p_k = \frac{k - 1/2}{n} \).
*/
HF5 {
@Override
double position0(double p, int n) {
// np + 0.5 - 1
return n * p - 0.5;
}
},
/**
* Linear interpolation of the expectations for the order statistics for the uniform
* distribution on [0,1]. Proposed by Weibull (1939).
*
* <p>\( m = p \). \( p_k = \frac{k}{n + 1} \).
*
* <p>This method computes the quantile as per the Apache Commons Math Percentile
* legacy implementation.
*/
HF6 {
@Override
double position0(double p, int n) {
// np + p - 1
return (n + 1) * p - 1;
}
},
/**
* Linear interpolation of the modes for the order statistics for the uniform
* distribution on [0,1]. Proposed by Gumbull (1939).
*
* <p>\( m = 1 - p \). \( p_k = \frac{k - 1}{n - 1} \).
*/
HF7 {
@Override
double position0(double p, int n) {
// np + 1-p - 1
return (n - 1) * p;
}
},
/**
* Linear interpolation of the approximate medians for order statistics.
*
* <p>\( m = (p + 1)/3 \). \( p_k = \frac{k - 1/3}{n + 1/3} \).
*
* <p>As per Hyndman and Fan (1996) this approach is most recommended as it provides
* an approximate median-unbiased estimate regardless of distribution.
*/
HF8 {
@Override
double position0(double p, int n) {
return n * p + (p + 1) / 3 - 1;
}
},
/**
* Quantile estimates are approximately unbiased for the expected order statistics if
* \( x \) is normally distributed.
*
* <p>\( m = p/4 + 3/8 \). \( p_k = \frac{k - 3/8}{n + 1/4} \).
*/
HF9 {
@Override
double position0(double p, int n) {
// np + p/4 + 3/8 - 1
return (n + 0.25) * p - 0.625;
}
};
/**
* Finds the real-valued position for calculation of the quantile.
*
* <p>Return {@code i + g} such that the quantile value from sorted data is:
*
* <p>value = data[i] + g * (data[i+1] - data[i])
*
* <p>Warning: Interpolation should not use {@code data[i+1]} unless {@code g != 0}.
*
* <p>Note: In contrast to the definition of Hyndman and Fan in the class header
* which uses a 1-based position, this is a zero based index. This change is for
* convenience when addressing array positions.
*
* @param p p<sup>th</sup> quantile.
* @param n Size.
* @return a real-valued position (0-based) into the range {@code [0, n)}
*/
abstract double position0(double p, int n);
/**
* Finds the index {@code i} and fractional part {@code g} of a real-valued position
* to interpolate the quantile.
*
* <p>Return {@code i + g} such that the quantile value from sorted data is:
*
* <p>value = data[i] + g * (data[i+1] - data[i])
*
* <p>Note: Interpolation should not use {@code data[i+1]} unless {@code g != 0}.
*
* @param p p<sup>th</sup> quantile.
* @param n Size.
* @return index (in [0, n-1])
*/
final double index(double p, int n) {
final double pos = position0(p, n);
// Bounds check in [0, n-1]
if (pos < 0) {
return 0;
}
if (pos > n - 1.0) {
return n - 1.0;
}
return pos;
}
}
}
|
oracle/graalpython | 35,559 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/objects/memoryview/MemoryViewNodes.java | /*
* Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.graal.python.builtins.objects.memoryview;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.IndexError;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.NotImplementedError;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.OverflowError;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.TypeError;
import static com.oracle.graal.python.builtins.PythonBuiltinClassType.ValueError;
import com.oracle.graal.python.builtins.objects.buffer.PythonBufferAccessLibrary;
import com.oracle.graal.python.builtins.objects.cext.capi.CExtNodes;
import com.oracle.graal.python.builtins.objects.cext.capi.CExtNodes.PCallCapiFunction;
import com.oracle.graal.python.builtins.objects.cext.capi.NativeCAPISymbol;
import com.oracle.graal.python.builtins.objects.cext.structs.CStructAccess;
import com.oracle.graal.python.builtins.objects.common.BufferStorageNodes;
import com.oracle.graal.python.builtins.objects.common.SequenceNodes;
import com.oracle.graal.python.builtins.objects.common.SequenceStorageNodes;
import com.oracle.graal.python.builtins.objects.memoryview.NativeBufferLifecycleManager.NativeBufferLifecycleManagerFromSlot;
import com.oracle.graal.python.builtins.objects.tuple.PTuple;
import com.oracle.graal.python.lib.PyIndexCheckNode;
import com.oracle.graal.python.lib.PyNumberAsSizeNode;
import com.oracle.graal.python.nodes.ErrorMessages;
import com.oracle.graal.python.nodes.PGuards;
import com.oracle.graal.python.nodes.PRaiseNode;
import com.oracle.graal.python.nodes.call.CallNode;
import com.oracle.graal.python.nodes.object.BuiltinClassProfiles.IsBuiltinObjectProfile;
import com.oracle.graal.python.nodes.util.CastToByteNode;
import com.oracle.graal.python.runtime.ExecutionContext.IndirectCallContext;
import com.oracle.graal.python.runtime.IndirectCallData;
import com.oracle.graal.python.runtime.exception.PException;
import com.oracle.graal.python.runtime.sequence.storage.NativeByteSequenceStorage;
import com.oracle.graal.python.runtime.sequence.storage.SequenceStorage;
import com.oracle.graal.python.util.BufferFormat;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.CompilerDirectives.ValueType;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.GenerateCached;
import com.oracle.truffle.api.dsl.GenerateInline;
import com.oracle.truffle.api.dsl.GenerateUncached;
import com.oracle.truffle.api.dsl.ImportStatic;
import com.oracle.truffle.api.dsl.NeverDefault;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.profiles.InlinedConditionProfile;
import com.oracle.truffle.api.strings.TruffleString;
public class MemoryViewNodes {
static boolean isByteFormat(BufferFormat format) {
return format == BufferFormat.UINT_8 || format == BufferFormat.INT_8 || format == BufferFormat.CHAR;
}
    /**
     * Ensures that {@code length} bytes starting at {@code offset} fit within the
     * memoryview's underlying buffer; raises {@code IndexError} otherwise.
     *
     * <p>NOTE(review): {@code offset + length} could overflow int for very large
     * values - presumably offsets derived from a valid memoryview stay small enough;
     * confirm against the callers.
     */
    static void checkBufferBounds(Node node, PMemoryView self, PythonBufferAccessLibrary bufferLib, int offset, int length) {
        if (offset + length > bufferLib.getBufferLength(self.getBuffer())) {
            /*
             * This can only happen when the buffer gets resized while being exported. CPython makes
             * such resizing illegal in the first place, but we don't prevent it due to absence of
             * reference counting.
             */
            CompilerDirectives.transferToInterpreterAndInvalidate();
            throw PRaiseNode.raiseStatic(node, IndexError, ErrorMessages.INVALID_BUFFER_ACCESS);
        }
    }
    /**
     * Computes the memoryview flags (scalar, C-contiguous, Fortran-contiguous,
     * PIL-style) from the buffer's dimensionality, item size, shape, strides and
     * suboffsets.
     */
    @GenerateInline
    @GenerateCached(false)
    @GenerateUncached
    public abstract static class InitFlagsNode extends Node {
        public abstract int execute(Node inliningTarget, int ndim, int itemsize, int[] shape, int[] strides, int[] suboffsets);
        @Specialization
        static int compute(int ndim, int itemsize, int[] shape, int[] strides, int[] suboffsets) {
            if (ndim == 0) {
                // 0-dimensional buffers are scalars and trivially both C and Fortran contiguous
                return PMemoryView.FLAG_C | PMemoryView.FLAG_FORTRAN | PMemoryView.FLAG_SCALAR;
            } else if (suboffsets != null) {
                // Indirect (suboffset-based) buffers are PIL-style
                return PMemoryView.FLAG_PIL;
            } else {
                // Assume both layouts, then clear each flag when a stride mismatch is found
                int flags = PMemoryView.FLAG_C | PMemoryView.FLAG_FORTRAN;
                // C order: strides must grow from the last dimension; dimensions of
                // extent <= 1 impose no constraint on the stride
                int expectedStride = itemsize;
                for (int i = ndim - 1; i >= 0; i--) {
                    int dim = shape[i];
                    if (dim > 1 && strides[i] != expectedStride) {
                        flags &= ~PMemoryView.FLAG_C;
                        break;
                    }
                    expectedStride *= dim;
                }
                // Fortran order: same check walking from the first dimension
                expectedStride = itemsize;
                for (int i = 0; i < ndim; i++) {
                    int dim = shape[i];
                    if (dim > 1 && strides[i] != expectedStride) {
                        flags &= ~PMemoryView.FLAG_FORTRAN;
                        break;
                    }
                    expectedStride *= dim;
                }
                return flags;
            }
        }
    }
    /**
     * Unpacks a single item at byte {@code offset} from {@code buffer} into a Python
     * value according to the buffer {@code format}. Formats that are not recognized
     * ({@code OTHER}) raise {@code NotImplementedError} with the format string.
     */
    @GenerateInline
    @GenerateCached(false)
    @ImportStatic(BufferFormat.class)
    public abstract static class UnpackValueNode extends Node {
        public abstract Object execute(Node inliningTarget, BufferFormat format, TruffleString formatStr, Object buffer, int offset);
        // Known format: delegate to the generic buffer-storage unpack
        @Specialization(guards = "format != OTHER")
        static Object unpack(Node inliningTarget, BufferFormat format, @SuppressWarnings("unused") TruffleString formatStr, Object buffer, int offset,
                        @Cached BufferStorageNodes.UnpackValueNode unpackValueNode) {
            return unpackValueNode.execute(inliningTarget, format, buffer, offset);
        }
        // Unknown format: report the original format string to the user
        @Fallback
        @SuppressWarnings("unused")
        static Object notImplemented(Node inliningTarget, BufferFormat format, TruffleString formatStr, Object buffer, int offset) {
            throw PRaiseNode.raiseStatic(inliningTarget, NotImplementedError, ErrorMessages.MEMORYVIEW_FORMAT_S_NOT_SUPPORTED, formatStr);
        }
    }
    /**
     * Packs a Python {@code object} into {@code buffer} at byte {@code offset}
     * according to the buffer {@code format}. An {@code OverflowError} raised during
     * the conversion is re-raised as {@code ValueError}; unrecognized formats
     * ({@code OTHER}) raise {@code NotImplementedError}.
     */
    @GenerateInline
    @GenerateCached(false)
    @ImportStatic(BufferFormat.class)
    public abstract static class PackValueNode extends Node {
        public abstract void execute(VirtualFrame frame, Node inliningTarget, BufferFormat format, TruffleString formatStr, Object object, Object buffer, int offset);
        // Known format: delegate to the generic buffer-storage pack
        @Specialization(guards = "format != OTHER")
        static void pack(VirtualFrame frame, Node inliningTarget, BufferFormat format, TruffleString formatStr, Object value, Object buffer, int offset,
                        @Cached IsBuiltinObjectProfile errorProfile,
                        @Cached BufferStorageNodes.PackValueNode packValueNode,
                        @Cached PRaiseNode raiseNode) {
            try {
                packValueNode.execute(frame, inliningTarget, format, value, buffer, offset);
            } catch (PException e) {
                // Only OverflowError is translated; anything else propagates
                e.expect(inliningTarget, OverflowError, errorProfile);
                throw raiseNode.raise(inliningTarget, ValueError, ErrorMessages.MEMORYVIEW_INVALID_VALUE_FOR_FORMAT_S, formatStr);
            }
        }
        // Unknown format: report the original format string to the user
        @Fallback
        @SuppressWarnings("unused")
        static void notImplemented(Node inliningTarget, BufferFormat format, TruffleString formatStr, Object object, Object buffer, int offset) {
            throw PRaiseNode.raiseStatic(inliningTarget, NotImplementedError, ErrorMessages.MEMORYVIEW_FORMAT_S_NOT_SUPPORTED, formatStr);
        }
    }
    /**
     * Copies {@code len} bytes at byte {@code offset} from the memoryview's backing
     * storage into {@code dest} starting at {@code destOffset}. The storage is native
     * memory when {@code ptr != null}, otherwise the managed buffer of {@code self}.
     */
    @GenerateUncached
    @GenerateInline
    @GenerateCached(false)
    abstract static class ReadBytesAtNode extends Node {
        public abstract void execute(Node inliningTarget, byte[] dest, int destOffset, int len, PMemoryView self, Object ptr, int offset);
        // Native pointer, small constant length: the loop is exploded for partial evaluation
        @Specialization(guards = {"ptr != null", "cachedLen == len", "cachedLen <= 8"}, limit = "4")
        @ExplodeLoop
        static void doNativeCached(byte[] dest, int destOffset, @SuppressWarnings("unused") int len, @SuppressWarnings("unused") PMemoryView self, Object ptr, int offset,
                        @Cached("len") int cachedLen,
                        @Shared @Cached(inline = false) CStructAccess.ReadByteNode readNode) {
            readNode.readByteArray(ptr, dest, cachedLen, offset, destOffset);
        }
        // Native pointer, arbitrary length
        @Specialization(guards = "ptr != null", replaces = "doNativeCached")
        static void doNativeGeneric(byte[] dest, int destOffset, int len, @SuppressWarnings("unused") PMemoryView self, Object ptr, int offset,
                        @Shared @Cached(inline = false) CStructAccess.ReadByteNode readNode) {
            readNode.readByteArray(ptr, dest, len, offset, destOffset);
        }
        // Managed buffer, small constant length; buffer bounds are validated first
        @Specialization(guards = {"ptr == null", "cachedLen == len", "cachedLen <= 8"}, limit = "4")
        @ExplodeLoop
        void doManagedCached(byte[] dest, int destOffset, @SuppressWarnings("unused") int len, PMemoryView self, @SuppressWarnings("unused") Object ptr, int offset,
                        @CachedLibrary("self.getBuffer()") PythonBufferAccessLibrary bufferLib,
                        @Cached("len") int cachedLen) {
            checkBufferBounds(this, self, bufferLib, offset, cachedLen);
            bufferLib.readIntoByteArray(self.getBuffer(), offset, dest, destOffset, cachedLen);
        }
        // Managed buffer, arbitrary length
        @Specialization(guards = "ptr == null", replaces = "doManagedCached", limit = "3")
        void doManagedGeneric(byte[] dest, int destOffset, int len, PMemoryView self, @SuppressWarnings("unused") Object ptr, int offset,
                        @CachedLibrary("self.getBuffer()") PythonBufferAccessLibrary bufferLib) {
            checkBufferBounds(this, self, bufferLib, offset, len);
            bufferLib.readIntoByteArray(self.getBuffer(), offset, dest, destOffset, len);
        }
    }
    /**
     * Copies {@code len} bytes from {@code src} starting at {@code srcOffset} into the
     * memoryview's backing storage at byte {@code offset}. The storage is native
     * memory when {@code ptr != null}, otherwise the managed buffer of {@code self}.
     */
    @GenerateUncached
    @GenerateInline
    @GenerateCached(false)
    abstract static class WriteBytesAtNode extends Node {
        public abstract void execute(Node inliningTarget, byte[] src, int srcOffset, int len, PMemoryView self, Object ptr, int offset);
        // Native pointer, small constant length: the loop is exploded for partial evaluation
        @Specialization(guards = {"ptr != null", "cachedLen == len", "cachedLen <= 8"}, limit = "4")
        @ExplodeLoop
        static void doNativeCached(byte[] src, int srcOffset, @SuppressWarnings("unused") int len, @SuppressWarnings("unused") PMemoryView self, Object ptr, int offset,
                        @Cached("len") int cachedLen,
                        @Shared @Cached(inline = false) CStructAccess.WriteByteNode writeNode) {
            writeNode.writeByteArray(ptr, src, cachedLen, srcOffset, offset);
        }
        // Native pointer, arbitrary length
        @Specialization(guards = "ptr != null", replaces = "doNativeCached")
        static void doNativeGeneric(byte[] src, int srcOffset, int len, @SuppressWarnings("unused") PMemoryView self, Object ptr, int offset,
                        @Shared @Cached(inline = false) CStructAccess.WriteByteNode writeNode) {
            writeNode.writeByteArray(ptr, src, len, srcOffset, offset);
        }
        // Managed buffer, small constant length; buffer bounds are validated first
        @Specialization(guards = {"ptr == null", "cachedLen == len", "cachedLen <= 8"}, limit = "4")
        @ExplodeLoop
        void doManagedCached(byte[] src, int srcOffset, @SuppressWarnings("unused") int len, PMemoryView self, @SuppressWarnings("unused") Object ptr, int offset,
                        @CachedLibrary("self.getBuffer()") PythonBufferAccessLibrary bufferLib,
                        @Cached("len") int cachedLen) {
            checkBufferBounds(this, self, bufferLib, offset, cachedLen);
            bufferLib.writeFromByteArray(self.getBuffer(), offset, src, srcOffset, cachedLen);
        }
        // Managed buffer, arbitrary length
        @Specialization(guards = "ptr == null", replaces = "doManagedCached", limit = "3")
        void doManagedGeneric(byte[] src, int srcOffset, int len, PMemoryView self, @SuppressWarnings("unused") Object ptr, int offset,
                        @CachedLibrary("self.getBuffer()") PythonBufferAccessLibrary bufferLib) {
            checkBufferBounds(this, self, bufferLib, offset, len);
            bufferLib.writeFromByteArray(self.getBuffer(), offset, src, srcOffset, len);
        }
    }
@GenerateInline(false) // footprint reduction 48 -> 29
// Reads one item (self.getItemSize() bytes) at the given offset and unpacks it into a
// Python value according to the view's format string.
abstract static class ReadItemAtNode extends Node {
public abstract Object execute(VirtualFrame frame, PMemoryView self, Object ptr, int offset);
// Native storage: wrap the raw pointer in a NativeByteSequenceStorage so the shared
// UnpackValueNode can read from it.
@Specialization(guards = "ptr != null")
static Object doNative(PMemoryView self, Object ptr, int offset,
@Bind Node inliningTarget,
@Shared @CachedLibrary(limit = "3") PythonBufferAccessLibrary bufferLib,
@Shared @Cached UnpackValueNode unpackValueNode) {
int itemSize = self.getItemSize();
checkBufferBounds(inliningTarget, self, bufferLib, offset, itemSize);
// NOTE(review): storage size/capacity is itemSize + offset so that reading at
// 'offset' stays in bounds of the wrapper — presumably intentional; confirm against
// NativeByteSequenceStorage.create's (size, capacity) contract.
NativeByteSequenceStorage buffer = NativeByteSequenceStorage.create(ptr, itemSize + offset, itemSize + offset, false);
return unpackValueNode.execute(inliningTarget, self.getFormat(), self.getFormatString(), buffer, offset);
}
// Managed storage: unpack directly from the managed buffer.
@Specialization(guards = "ptr == null")
static Object doManaged(PMemoryView self, @SuppressWarnings("unused") Object ptr, int offset,
@Bind Node inliningTarget,
@Shared @CachedLibrary(limit = "3") PythonBufferAccessLibrary bufferLib,
@Shared @Cached UnpackValueNode unpackValueNode) {
int itemSize = self.getItemSize();
checkBufferBounds(inliningTarget, self, bufferLib, offset, itemSize);
return unpackValueNode.execute(inliningTarget, self.getFormat(), self.getFormatString(), self.getBuffer(), offset);
}
// NOTE(review): not referenced by any specialization in this class — looks like a
// leftover cached-parameter factory; candidate for removal if generated code does
// not reference it.
@NeverDefault
protected static CastToByteNode createCoerce() {
return CastToByteNode.create(true);
}
}
@GenerateInline(false) // footprint reduction 48 -> 29
// Packs a Python value into one item slot (self.getItemSize() bytes) at the given
// offset, according to the view's format string. Write-side twin of ReadItemAtNode.
abstract static class WriteItemAtNode extends Node {
public abstract void execute(VirtualFrame frame, PMemoryView self, Object ptr, int offset, Object object);
// Native storage: wrap the raw pointer so the shared PackValueNode can write to it.
@Specialization(guards = "ptr != null")
static void doNative(VirtualFrame frame, PMemoryView self, Object ptr, int offset, Object object,
@Bind Node inliningTarget,
@Shared @CachedLibrary(limit = "3") PythonBufferAccessLibrary bufferLib,
@Shared @Cached PackValueNode packValueNode) {
int itemSize = self.getItemSize();
checkBufferBounds(inliningTarget, self, bufferLib, offset, itemSize);
// Same itemSize + offset sizing as ReadItemAtNode.doNative — keeps writes at
// 'offset' within the wrapper's bounds.
NativeByteSequenceStorage buffer = NativeByteSequenceStorage.create(ptr, itemSize + offset, itemSize + offset, false);
packValueNode.execute(frame, inliningTarget, self.getFormat(), self.getFormatString(), object, buffer, offset);
}
// Managed storage: pack directly into the managed buffer.
@Specialization(guards = "ptr == null")
static void doManaged(VirtualFrame frame, PMemoryView self, @SuppressWarnings("unused") Object ptr, int offset, Object object,
@Bind Node inliningTarget,
@Shared @CachedLibrary(limit = "3") PythonBufferAccessLibrary bufferLib,
@Shared @Cached PackValueNode packValueNode) {
int itemSize = self.getItemSize();
checkBufferBounds(inliningTarget, self, bufferLib, offset, itemSize);
packValueNode.execute(frame, inliningTarget, self.getFormat(), self.getFormatString(), object, self.getBuffer(), offset);
}
}
@ValueType
// Mutable (pointer, offset) pair used while resolving an element's location inside a
// memoryview; lookupDimension advances it one dimension at a time.
static class MemoryPointer {
    // Backing storage pointer; null when the view is backed by a managed buffer.
    public Object ptr;
    // Byte offset relative to ptr.
    public int offset;

    public MemoryPointer(Object initialPtr, int initialOffset) {
        ptr = initialPtr;
        offset = initialOffset;
    }
}
@ImportStatic(PGuards.class)
// Resolves an index expression (int, int-convertible object, or tuple of such) against
// a memoryview's shape/strides/suboffsets into a concrete MemoryPointer.
abstract static class PointerLookupNode extends Node {
@Child private CExtNodes.PCallCapiFunction callCapiFunction;
@Child private PyNumberAsSizeNode asSizeNode;
// index can be a tuple, int or int-convertible
public abstract MemoryPointer execute(VirtualFrame frame, PMemoryView self, Object index);
public abstract MemoryPointer execute(VirtualFrame frame, PMemoryView self, int index);
// Advances ptr by one dimension: normalizes a negative index, bounds-checks it,
// applies the dimension's stride, and — if this dimension has a suboffset — follows
// the indirection via the C API helper, resetting the offset to 0.
private void lookupDimension(Node inliningTarget, PMemoryView self, MemoryPointer ptr, int dim, int initialIndex, InlinedConditionProfile hasSuboffsetsProfile, PRaiseNode raiseNode) {
int index = initialIndex;
int[] shape = self.getBufferShape();
int nitems = shape[dim];
if (index < 0) {
// Python-style negative indexing counts from the end of the dimension.
index += nitems;
}
if (index < 0 || index >= nitems) {
throw raiseNode.raise(inliningTarget, IndexError, ErrorMessages.INDEX_OUT_OF_BOUNDS_ON_DIMENSION_D, dim + 1);
}
ptr.offset += self.getBufferStrides()[dim] * index;
int[] suboffsets = self.getBufferSuboffsets();
if (hasSuboffsetsProfile.profile(inliningTarget, suboffsets != null) && suboffsets[dim] >= 0) {
// The length may be out of bounds, but sulong shouldn't care if we don't
// access the out-of-bound part
ptr.ptr = getCallCapiFunction().call(NativeCAPISymbol.FUN_ADD_SUBOFFSET, ptr.ptr, ptr.offset, suboffsets[dim]);
ptr.offset = 0;
}
}
// Plain int index: only valid for 1-dimensional views.
@Specialization
MemoryPointer resolveInt(PMemoryView self, int index,
@Bind Node inliningTarget,
@Shared @Cached InlinedConditionProfile hasOneDimensionProfile,
@Shared @Cached InlinedConditionProfile hasSuboffsetsProfile,
@Shared @Cached PRaiseNode raiseNode) {
if (hasOneDimensionProfile.profile(inliningTarget, self.getDimensions() == 1)) {
MemoryPointer ptr = new MemoryPointer(self.getBufferPointer(), self.getOffset());
lookupDimension(inliningTarget, self, ptr, 0, index, hasSuboffsetsProfile, raiseNode);
return ptr;
}
if (self.getDimensions() == 0) {
throw raiseNode.raise(inliningTarget, TypeError, ErrorMessages.INVALID_INDEXING_OF_0_DIM_MEMORY);
} else {
// CPython doesn't implement this either, as of 3.8
throw raiseNode.raise(inliningTarget, NotImplementedError, ErrorMessages.MULTI_DIMENSIONAL_SUB_VIEWS_NOT_IMPLEMENTED);
}
}
// Tuple index with a constant dimension count (<= 8): the per-dimension loop is
// exploded by Truffle.
@Specialization(guards = {"cachedDimensions == self.getDimensions()", "cachedDimensions <= 8"}, limit = "3")
@ExplodeLoop
@SuppressWarnings("truffle-static-method")
MemoryPointer resolveTupleCached(VirtualFrame frame, PMemoryView self, PTuple indices,
@Bind Node inliningTarget,
@Shared @Cached InlinedConditionProfile hasSuboffsetsProfile,
@Shared @Cached PyIndexCheckNode indexCheckNode,
@Cached("self.getDimensions()") int cachedDimensions,
@Shared @Cached SequenceNodes.GetSequenceStorageNode getSequenceStorageNode,
@Shared @Cached SequenceStorageNodes.GetItemScalarNode getItemNode,
@Shared @Cached PRaiseNode raiseNode) {
SequenceStorage indicesStorage = getSequenceStorageNode.execute(inliningTarget, indices);
checkTupleLength(inliningTarget, indicesStorage, cachedDimensions, raiseNode);
MemoryPointer ptr = new MemoryPointer(self.getBufferPointer(), self.getOffset());
for (int dim = 0; dim < cachedDimensions; dim++) {
Object indexObj = getItemNode.execute(inliningTarget, indicesStorage, dim);
int index = convertIndex(frame, inliningTarget, indexCheckNode, indexObj, raiseNode);
lookupDimension(inliningTarget, self, ptr, dim, index, hasSuboffsetsProfile, raiseNode);
}
return ptr;
}
// Tuple index, generic dimension count; replaces the exploded variant.
@Specialization(replaces = "resolveTupleCached")
MemoryPointer resolveTupleGeneric(VirtualFrame frame, PMemoryView self, PTuple indices,
@Bind Node inliningTarget,
@Shared @Cached InlinedConditionProfile hasSuboffsetsProfile,
@Shared @Cached PyIndexCheckNode indexCheckNode,
@Shared @Cached SequenceNodes.GetSequenceStorageNode getSequenceStorageNode,
@Shared @Cached SequenceStorageNodes.GetItemScalarNode getItemNode,
@Shared @Cached PRaiseNode raiseNode) {
SequenceStorage indicesStorage = getSequenceStorageNode.execute(inliningTarget, indices);
int ndim = self.getDimensions();
checkTupleLength(inliningTarget, indicesStorage, ndim, raiseNode);
MemoryPointer ptr = new MemoryPointer(self.getBufferPointer(), self.getOffset());
for (int dim = 0; dim < ndim; dim++) {
Object indexObj = getItemNode.execute(inliningTarget, indicesStorage, dim);
int index = convertIndex(frame, inliningTarget, indexCheckNode, indexObj, raiseNode);
lookupDimension(inliningTarget, self, ptr, dim, index, hasSuboffsetsProfile, raiseNode);
}
return ptr;
}
// Any non-tuple index object: coerce to int via __index__, then delegate to resolveInt.
@Specialization(guards = "!isPTuple(indexObj)")
MemoryPointer resolveIntObj(VirtualFrame frame, PMemoryView self, Object indexObj,
@Bind Node inliningTarget,
@Shared @Cached InlinedConditionProfile hasOneDimensionProfile,
@Shared @Cached InlinedConditionProfile hasSuboffsetsProfile,
@Shared @Cached PyIndexCheckNode indexCheckNode,
@Shared @Cached PRaiseNode raiseNode) {
final int index = convertIndex(frame, inliningTarget, indexCheckNode, indexObj, raiseNode);
return resolveInt(self, index, inliningTarget, hasOneDimensionProfile, hasSuboffsetsProfile, raiseNode);
}
// Raises TypeError/NotImplementedError when the tuple length does not match ndim,
// mirroring CPython's error behavior.
private static void checkTupleLength(Node inliningTarget, SequenceStorage indicesStorage, int ndim, PRaiseNode raiseNode) {
int length = indicesStorage.length();
if (length == ndim) {
return;
}
// Error cases
if (ndim == 0) {
throw raiseNode.raise(inliningTarget, TypeError, ErrorMessages.INVALID_INDEXING_OF_0_DIM_MEMORY);
} else if (length > ndim) {
throw raiseNode.raise(inliningTarget, TypeError, ErrorMessages.CANNOT_INDEX_D_DIMENSION_VIEW_WITH_D, ndim, length);
} else {
// CPython doesn't implement this either, as of 3.8
throw raiseNode.raise(inliningTarget, NotImplementedError, ErrorMessages.SUB_VIEWS_NOT_IMPLEMENTED);
}
}
// Converts an index-like object to an exact int, raising TypeError for
// non-index objects and IndexError on overflow.
private int convertIndex(VirtualFrame frame, Node inliningTarget, PyIndexCheckNode indexCheckNode, Object indexObj, PRaiseNode raiseNode) {
if (!indexCheckNode.execute(inliningTarget, indexObj)) {
throw raiseNode.raise(inliningTarget, TypeError, ErrorMessages.MEMORYVIEW_INVALID_SLICE_KEY);
}
return getAsSizeNode().executeExact(frame, inliningTarget, indexObj, IndexError);
}
// Lazily inserted child node (standard Truffle lazy-@Child pattern).
private PyNumberAsSizeNode getAsSizeNode() {
if (asSizeNode == null) {
CompilerDirectives.transferToInterpreterAndInvalidate();
asSizeNode = insert(PyNumberAsSizeNode.create());
}
return asSizeNode;
}
// Lazily inserted child node for calling the C API suboffset helper.
private CExtNodes.PCallCapiFunction getCallCapiFunction() {
if (callCapiFunction == null) {
CompilerDirectives.transferToInterpreterAndInvalidate();
callCapiFunction = insert(CExtNodes.PCallCapiFunction.create());
}
return callCapiFunction;
}
}
@GenerateUncached
@GenerateInline(false)
// Copies a memoryview's contents into a flat Java byte[] in C (row-major) order,
// walking shape/strides/suboffsets recursively.
public abstract static class ToJavaBytesNode extends Node {
public abstract byte[] execute(PMemoryView self);
// Fast path: dimension count specialized to a constant.
// NOTE(review): guard uses strict "cachedDimensions < 8" while similar guards in
// this file use "<= 8" — confirm whether that asymmetry is intentional.
@Specialization(guards = {"self.getDimensions() == cachedDimensions", "cachedDimensions < 8"}, limit = "3")
@SuppressWarnings("truffle-static-method")
byte[] tobytesCached(PMemoryView self,
@Bind Node inliningTarget,
@Cached("self.getDimensions()") int cachedDimensions,
@Shared @Cached ReadBytesAtNode readBytesAtNode,
@Shared @Cached CExtNodes.PCallCapiFunction callCapiFunction,
@Shared @Cached PRaiseNode raiseNode) {
self.checkReleased(inliningTarget, raiseNode);
byte[] bytes = new byte[self.getLength()];
if (cachedDimensions == 0) {
// 0-dim view holds a single item at the view's base offset.
readBytesAtNode.execute(inliningTarget, bytes, 0, self.getItemSize(), self, self.getBufferPointer(), self.getOffset());
} else {
convert(inliningTarget, bytes, self, cachedDimensions, readBytesAtNode, callCapiFunction);
}
return bytes;
}
// Generic path; the recursive walk goes behind a TruffleBoundary.
@Specialization(replaces = "tobytesCached")
byte[] tobytesGeneric(PMemoryView self,
@Bind Node inliningTarget,
@Shared @Cached ReadBytesAtNode readBytesAtNode,
@Shared @Cached CExtNodes.PCallCapiFunction callCapiFunction,
@Shared @Cached PRaiseNode raiseNode) {
self.checkReleased(inliningTarget, raiseNode);
byte[] bytes = new byte[self.getLength()];
if (self.getDimensions() == 0) {
readBytesAtNode.execute(inliningTarget, bytes, 0, self.getItemSize(), self, self.getBufferPointer(), self.getOffset());
} else {
convertBoundary(inliningTarget, bytes, self, self.getDimensions(), readBytesAtNode, callCapiFunction);
}
return bytes;
}
@TruffleBoundary
private void convertBoundary(Node inliningTarget, byte[] dest, PMemoryView self, int ndim, ReadBytesAtNode readBytesAtNode, CExtNodes.PCallCapiFunction callCapiFunction) {
convert(inliningTarget, dest, self, ndim, readBytesAtNode, callCapiFunction);
}
// Overridden by ToJavaBytesFortranOrderNode to change the element order.
protected void convert(Node inliningTarget, byte[] dest, PMemoryView self, int ndim, ReadBytesAtNode readBytesAtNode, CExtNodes.PCallCapiFunction callCapiFunction) {
recursive(inliningTarget, dest, 0, self, 0, ndim, self.getBufferPointer(), self.getOffset(), readBytesAtNode, callCapiFunction);
}
// Depth-first walk over the dimensions; returns the destination offset reached so the
// caller can continue appending. Suboffsets (indirect dimensions) are followed via the
// C API helper with the offset reset to 0.
private static int recursive(Node inliningTarget, byte[] dest, int initialDestOffset, PMemoryView self, int dim, int ndim, Object ptr, int initialOffset, ReadBytesAtNode readBytesAtNode,
CExtNodes.PCallCapiFunction callCapiFunction) {
int offset = initialOffset;
int destOffset = initialDestOffset;
for (int i = 0; i < self.getBufferShape()[dim]; i++) {
Object xptr = ptr;
int xoffset = offset;
if (self.getBufferSuboffsets() != null && self.getBufferSuboffsets()[dim] >= 0) {
xptr = callCapiFunction.call(NativeCAPISymbol.FUN_ADD_SUBOFFSET, ptr, offset, self.getBufferSuboffsets()[dim]);
xoffset = 0;
}
if (dim == ndim - 1) {
// Innermost dimension: copy one item and advance the destination cursor.
readBytesAtNode.execute(inliningTarget, dest, destOffset, self.getItemSize(), self, xptr, xoffset);
destOffset += self.getItemSize();
} else {
destOffset = recursive(inliningTarget, dest, destOffset, self, dim + 1, ndim, xptr, xoffset, readBytesAtNode, callCapiFunction);
}
offset += self.getBufferStrides()[dim];
}
return destOffset;
}
@NeverDefault
public static ToJavaBytesNode create() {
return MemoryViewNodesFactory.ToJavaBytesNodeGen.create();
}
}
@GenerateUncached
@GenerateInline(false)
// Variant of ToJavaBytesNode that emits elements in Fortran (column-major) order: the
// destination is advanced by a per-dimension stride instead of sequentially.
public abstract static class ToJavaBytesFortranOrderNode extends ToJavaBytesNode {
@Override
protected void convert(Node inliningTarget, byte[] dest, PMemoryView self, int ndim, ReadBytesAtNode readBytesAtNode, CExtNodes.PCallCapiFunction callCapiFunction) {
// Outermost destination stride is one item; inner recursion multiplies it by each
// dimension's extent.
recursive(inliningTarget, dest, 0, self.getItemSize(), self, 0, ndim, self.getBufferPointer(), self.getOffset(), readBytesAtNode, callCapiFunction);
}
private static void recursive(Node inliningTarget, byte[] dest, int initialDestOffset, int destStride, PMemoryView self, int dim, int ndim, Object ptr, int initialOffset,
ReadBytesAtNode readBytesAtNode,
CExtNodes.PCallCapiFunction callCapiFunction) {
int offset = initialOffset;
int destOffset = initialDestOffset;
for (int i = 0; i < self.getBufferShape()[dim]; i++) {
Object xptr = ptr;
int xoffset = offset;
if (self.getBufferSuboffsets() != null && self.getBufferSuboffsets()[dim] >= 0) {
// Indirect dimension: follow the suboffset via the C API helper.
xptr = callCapiFunction.call(NativeCAPISymbol.FUN_ADD_SUBOFFSET, ptr, offset, self.getBufferSuboffsets()[dim]);
xoffset = 0;
}
if (dim == ndim - 1) {
readBytesAtNode.execute(inliningTarget, dest, destOffset, self.getItemSize(), self, xptr, xoffset);
} else {
recursive(inliningTarget, dest, destOffset, destStride * self.getBufferShape()[dim], self, dim + 1, ndim, xptr, xoffset, readBytesAtNode, callCapiFunction);
}
// Column-major: consecutive indices of this dimension are destStride bytes apart.
destOffset += destStride;
offset += self.getBufferStrides()[dim];
}
}
public static ToJavaBytesFortranOrderNode create() {
return MemoryViewNodesFactory.ToJavaBytesFortranOrderNodeGen.create();
}
}
@GenerateInline(false) // footprint reduction 36 -> 17
// Implements memoryview.release(): refuses to release while exports exist, and — when
// the view owns a native buffer — invokes the buffer's release function first.
public abstract static class ReleaseNode extends Node {
// Frame-less convenience entry point.
public final void execute(PMemoryView self) {
execute(null, self);
}
public abstract void execute(VirtualFrame frame, PMemoryView self);
// No lifecycle reference: just validate exports and mark the view released.
@Specialization(guards = "self.getReference() == null")
static void releaseSimple(PMemoryView self,
@Bind Node inliningTarget,
@Shared("raise") @Cached PRaiseNode raiseNode) {
self.checkExports(inliningTarget, raiseNode);
self.setReleased();
}
// Native-backed view: release the underlying buffer if this view is the one
// responsible for it, then mark released.
@Specialization(guards = {"self.getReference() != null"})
static void releaseNative(VirtualFrame frame, PMemoryView self,
@Bind Node inliningTarget,
@Cached("createFor($node)") IndirectCallData indirectCallData,
@Cached ReleaseBufferNode releaseNode,
@Shared("raise") @Cached PRaiseNode raiseNode) {
self.checkExports(inliningTarget, raiseNode);
if (self.checkShouldReleaseBuffer()) {
releaseNode.execute(frame, inliningTarget, indirectCallData, self.getLifecycleManager());
}
self.setReleased();
}
}
@GenerateUncached
@GenerateInline
@GenerateCached(false)
// Dispatches buffer release to the correct mechanism based on the lifecycle manager's
// concrete type: C API release, bf_releasebuffer slot call, or no-op for managed buffers.
public abstract static class ReleaseBufferNode extends Node {
public abstract void execute(Node inliningTarget, BufferLifecycleManager buffer);
public static void executeUncached(BufferLifecycleManager buffer) {
MemoryViewNodesFactory.ReleaseBufferNodeGen.getUncached().execute(null, buffer);
}
// Frame-aware entry point: brackets the release in an indirect-call context so
// callee frames are visible across the boundary.
public final void execute(VirtualFrame frame, Node inliningTarget, IndirectCallData indirectCallData, BufferLifecycleManager buffer) {
Object state = IndirectCallContext.enter(frame, inliningTarget, indirectCallData);
try {
execute(inliningTarget, buffer);
} finally {
IndirectCallContext.exit(frame, inliningTarget, indirectCallData, state);
}
}
// Buffer obtained through the C API: release via GraalPy's native helper.
@Specialization
static void doCApiCached(NativeBufferLifecycleManager.NativeBufferLifecycleManagerFromType buffer,
@Cached(inline = false) PCallCapiFunction callReleaseNode) {
callReleaseNode.call(NativeCAPISymbol.FUN_GRAALPY_RELEASE_BUFFER, buffer.bufferStructPointer);
}
// Buffer obtained through a type's bf_releasebuffer slot: call the stored function.
@Specialization
static void doCExtBuffer(NativeBufferLifecycleManagerFromSlot buffer,
@Cached(inline = false) CallNode callNode) {
callNode.executeWithoutFrame(buffer.releaseFunction, buffer.self, buffer.buffer);
}
// Managed buffers need no explicit release.
@Fallback
static void doManaged(@SuppressWarnings("unused") BufferLifecycleManager buffer) {
// nothing to do
}
}
}
// ---- dataset boundary: next file is java-discoveryengine/google-cloud-discoveryengine/src/test/java/com/google/cloud/discoveryengine/v1alpha/DocumentServiceClientHttpJsonTest.java (repo googleapis/google-cloud-java) ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1alpha;
import static com.google.cloud.discoveryengine.v1alpha.DocumentServiceClient.ListDocumentsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.discoveryengine.v1alpha.stub.HttpJsonDocumentServiceStub;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Struct;
import com.google.protobuf.Timestamp;
import com.google.rpc.Status;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class DocumentServiceClientHttpJsonTest {
private static MockHttpService mockService;
private static DocumentServiceClient client;
@BeforeClass
public static void startStaticServer() throws IOException {
    // One mock HTTP transport and one client are shared by every test in this class.
    mockService =
        new MockHttpService(
            HttpJsonDocumentServiceStub.getMethodDescriptors(),
            DocumentServiceSettings.getDefaultEndpoint());
    DocumentServiceSettings.Builder settingsBuilder = DocumentServiceSettings.newHttpJsonBuilder();
    // Route all traffic through the in-process mock instead of the real endpoint.
    settingsBuilder.setTransportChannelProvider(
        DocumentServiceSettings.defaultHttpJsonTransportProviderBuilder()
            .setHttpTransport(mockService)
            .build());
    // No real credentials are needed against the mock.
    settingsBuilder.setCredentialsProvider(NoCredentialsProvider.create());
    client = DocumentServiceClient.create(settingsBuilder.build());
}
@AfterClass
// Closes the shared client (and its transport) once all tests have run.
public static void stopServer() {
client.close();
}
@Before
// No per-test setup is required; state is reset in tearDown instead.
public void setUp() {}
@After
// Clears queued responses/exceptions and recorded requests between tests.
public void tearDown() throws Exception {
mockService.reset();
}
@Test
public void getDocumentTest() throws Exception {
    // Stub a fully populated Document and verify the client returns it unchanged
    // when looked up by a typed resource name.
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .setId("id3355")
            .setSchemaId("schemaId-697673060")
            .setContent(Document.Content.newBuilder().build())
            .setParentDocumentId("parentDocumentId1990105056")
            .setDerivedStructData(Struct.newBuilder().build())
            .setAclInfo(Document.AclInfo.newBuilder().build())
            .setIndexTime(Timestamp.newBuilder().build())
            .setIndexStatus(Document.IndexStatus.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    DocumentName name =
        DocumentName.ofProjectLocationDataStoreBranchDocumentName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]");
    Assert.assertEquals(expectedResponse, client.getDocument(name));

    // Exactly one HTTP request must have been issued, carrying a well-formed
    // api-client header.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void getDocumentExceptionTest() throws Exception {
    // Queue an INVALID_ARGUMENT failure and check it surfaces as InvalidArgumentException.
    mockService.addException(
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
    DocumentName name =
        DocumentName.ofProjectLocationDataStoreBranchDocumentName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]");
    try {
        client.getDocument(name);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException expected) {
        // Expected outcome.
    }
}
@Test
public void getDocumentTest2() throws Exception {
    // Same as getDocumentTest, but using the plain-String resource-name overload.
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .setId("id3355")
            .setSchemaId("schemaId-697673060")
            .setContent(Document.Content.newBuilder().build())
            .setParentDocumentId("parentDocumentId1990105056")
            .setDerivedStructData(Struct.newBuilder().build())
            .setAclInfo(Document.AclInfo.newBuilder().build())
            .setIndexTime(Timestamp.newBuilder().build())
            .setIndexStatus(Document.IndexStatus.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String name =
        "projects/project-2446/locations/location-2446/dataStores/dataStore-2446/branches/branche-2446/documents/document-2446";
    Assert.assertEquals(expectedResponse, client.getDocument(name));

    // Verify request count and api-client header shape.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void getDocumentExceptionTest2() throws Exception {
    // String-name overload must propagate INVALID_ARGUMENT the same way.
    mockService.addException(
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
    String name =
        "projects/project-2446/locations/location-2446/dataStores/dataStore-2446/branches/branche-2446/documents/document-2446";
    try {
        client.getDocument(name);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException expected) {
        // Expected outcome.
    }
}
@Test
public void listDocumentsTest() throws Exception {
    // Single-page listing (empty next-page token) with one Document element.
    Document element = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(element))
            .build();
    mockService.addResponse(expectedResponse);

    BranchName parent =
        BranchName.ofProjectLocationDataStoreBranchName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]");
    ListDocumentsPagedResponse page = client.listDocuments(parent);

    List<Document> resources = Lists.newArrayList(page.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));

    // Verify request count and api-client header shape.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void listDocumentsExceptionTest() throws Exception {
    // Listing by typed parent name must propagate INVALID_ARGUMENT.
    mockService.addException(
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
    BranchName parent =
        BranchName.ofProjectLocationDataStoreBranchName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]");
    try {
        client.listDocuments(parent);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException expected) {
        // Expected outcome.
    }
}
@Test
public void listDocumentsTest2() throws Exception {
    // Same as listDocumentsTest, but using the plain-String parent overload.
    Document element = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(element))
            .build();
    mockService.addResponse(expectedResponse);

    String parent =
        "projects/project-3187/locations/location-3187/dataStores/dataStore-3187/branches/branche-3187";
    ListDocumentsPagedResponse page = client.listDocuments(parent);

    List<Document> resources = Lists.newArrayList(page.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));

    // Verify request count and api-client header shape.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void listDocumentsExceptionTest2() throws Exception {
    // String-parent overload must propagate INVALID_ARGUMENT as well.
    mockService.addException(
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
    String parent =
        "projects/project-3187/locations/location-3187/dataStores/dataStore-3187/branches/branche-3187";
    try {
        client.listDocuments(parent);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException expected) {
        // Expected outcome.
    }
}
@Test
public void createDocumentTest() throws Exception {
    // Create under a typed parent name; the stubbed Document is echoed back.
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .setId("id3355")
            .setSchemaId("schemaId-697673060")
            .setContent(Document.Content.newBuilder().build())
            .setParentDocumentId("parentDocumentId1990105056")
            .setDerivedStructData(Struct.newBuilder().build())
            .setAclInfo(Document.AclInfo.newBuilder().build())
            .setIndexTime(Timestamp.newBuilder().build())
            .setIndexStatus(Document.IndexStatus.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    BranchName parent =
        BranchName.ofProjectLocationDataStoreBranchName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]");
    Assert.assertEquals(
        expectedResponse,
        client.createDocument(parent, Document.newBuilder().build(), "documentId-814940266"));

    // Verify request count and api-client header shape.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void createDocumentExceptionTest() throws Exception {
    // Create with a typed parent must propagate INVALID_ARGUMENT.
    mockService.addException(
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
    BranchName parent =
        BranchName.ofProjectLocationDataStoreBranchName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]");
    try {
        client.createDocument(parent, Document.newBuilder().build(), "documentId-814940266");
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException expected) {
        // Expected outcome.
    }
}
@Test
public void createDocumentTest2() throws Exception {
    // Same as createDocumentTest, but using the plain-String parent overload.
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .setId("id3355")
            .setSchemaId("schemaId-697673060")
            .setContent(Document.Content.newBuilder().build())
            .setParentDocumentId("parentDocumentId1990105056")
            .setDerivedStructData(Struct.newBuilder().build())
            .setAclInfo(Document.AclInfo.newBuilder().build())
            .setIndexTime(Timestamp.newBuilder().build())
            .setIndexStatus(Document.IndexStatus.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);

    String parent =
        "projects/project-3187/locations/location-3187/dataStores/dataStore-3187/branches/branche-3187";
    Assert.assertEquals(
        expectedResponse,
        client.createDocument(parent, Document.newBuilder().build(), "documentId-814940266"));

    // Verify request count and api-client header shape.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
@Test
public void createDocumentExceptionTest2() throws Exception {
    // String-parent create must propagate INVALID_ARGUMENT as well.
    mockService.addException(
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false));
    String parent =
        "projects/project-3187/locations/location-3187/dataStores/dataStore-3187/branches/branche-3187";
    try {
        client.createDocument(parent, Document.newBuilder().build(), "documentId-814940266");
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException expected) {
        // Expected outcome.
    }
}
@Test
public void updateDocumentTest() throws Exception {
    // The original test sends and expects two structurally identical Documents;
    // protobuf messages are immutable, so a single instance serves both roles.
    Document document =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .setId("id3355")
            .setSchemaId("schemaId-697673060")
            .setContent(Document.Content.newBuilder().build())
            .setParentDocumentId("parentDocumentId1990105056")
            .setDerivedStructData(Struct.newBuilder().build())
            .setAclInfo(Document.AclInfo.newBuilder().build())
            .setIndexTime(Timestamp.newBuilder().build())
            .setIndexStatus(Document.IndexStatus.newBuilder().build())
            .build();
    mockService.addResponse(document);

    FieldMask updateMask = FieldMask.newBuilder().build();
    Assert.assertEquals(document, client.updateDocument(document, updateMask));

    // Verify request count and api-client header shape.
    Assert.assertEquals(1, mockService.getRequestPaths().size());
    String headerValue =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern().matcher(headerValue).matches());
}
  // Primes the mock with an INVALID_ARGUMENT error and checks that updateDocument()
  // surfaces it to the caller as InvalidArgumentException.
  @Test
  public void updateDocumentExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      Document document =
          Document.newBuilder()
              .setName(
                  DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                          "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                      .toString())
              .setId("id3355")
              .setSchemaId("schemaId-697673060")
              .setContent(Document.Content.newBuilder().build())
              .setParentDocumentId("parentDocumentId1990105056")
              .setDerivedStructData(Struct.newBuilder().build())
              .setAclInfo(Document.AclInfo.newBuilder().build())
              .setIndexTime(Timestamp.newBuilder().build())
              .setIndexStatus(Document.IndexStatus.newBuilder().build())
              .build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateDocument(document, updateMask);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies deleteDocument(DocumentName): completes against the mocked Empty response
  // and sends exactly one request with a valid API-client header.
  @Test
  public void deleteDocumentTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);
    DocumentName name =
        DocumentName.ofProjectLocationDataStoreBranchDocumentName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]");
    client.deleteDocument(name);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // deleteDocument(DocumentName) must propagate a primed INVALID_ARGUMENT error
  // as InvalidArgumentException.
  @Test
  public void deleteDocumentExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      DocumentName name =
          DocumentName.ofProjectLocationDataStoreBranchDocumentName(
              "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]");
      client.deleteDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same as deleteDocumentTest but exercising the String-name overload with a raw
  // resource path instead of a DocumentName.
  @Test
  public void deleteDocumentTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);
    String name =
        "projects/project-2446/locations/location-2446/dataStores/dataStore-2446/branches/branche-2446/documents/document-2446";
    client.deleteDocument(name);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error-path twin of deleteDocumentTest2 (String-name overload).
  @Test
  public void deleteDocumentExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name =
          "projects/project-2446/locations/location-2446/dataStores/dataStore-2446/branches/branche-2446/documents/document-2446";
      client.deleteDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Long-running operation happy path: the mock returns a completed Operation whose
  // response payload is the expected ImportDocumentsResponse; the client unwraps it.
  @Test
  public void importDocumentsTest() throws Exception {
    ImportDocumentsResponse expectedResponse =
        ImportDocumentsResponse.newBuilder()
            .addAllErrorSamples(new ArrayList<Status>())
            .setErrorConfig(ImportErrorConfig.newBuilder().build())
            .build();
    // Operation is pre-marked done so the poller resolves immediately.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("importDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockService.addResponse(resultOperation);
    ImportDocumentsRequest request =
        ImportDocumentsRequest.newBuilder()
            .setParent(
                BranchName.ofProjectLocationDataStoreBranchName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]")
                    .toString())
            .setErrorConfig(ImportErrorConfig.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .setAutoGenerateIds(true)
            .setIdField("idField1629396127")
            .build();
    ImportDocumentsResponse actualResponse = client.importDocumentsAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
@Test
public void importDocumentsExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
ImportDocumentsRequest request =
ImportDocumentsRequest.newBuilder()
.setParent(
BranchName.ofProjectLocationDataStoreBranchName(
"[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]")
.toString())
.setErrorConfig(ImportErrorConfig.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.setAutoGenerateIds(true)
.setIdField("idField1629396127")
.build();
client.importDocumentsAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
  // LRO happy path for purgeDocuments: completed Operation wraps the expected
  // PurgeDocumentsResponse; one request, valid API-client header.
  @Test
  public void purgeDocumentsTest() throws Exception {
    PurgeDocumentsResponse expectedResponse =
        PurgeDocumentsResponse.newBuilder()
            .setPurgeCount(575305851)
            .addAllPurgeSample(new ArrayList<String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("purgeDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockService.addResponse(resultOperation);
    PurgeDocumentsRequest request =
        PurgeDocumentsRequest.newBuilder()
            .setParent(
                BranchName.ofProjectLocationDataStoreBranchName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]")
                    .toString())
            .setFilter("filter-1274492040")
            .setErrorConfig(PurgeErrorConfig.newBuilder().build())
            .setForce(true)
            .build();
    PurgeDocumentsResponse actualResponse = client.purgeDocumentsAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
@Test
public void purgeDocumentsExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
PurgeDocumentsRequest request =
PurgeDocumentsRequest.newBuilder()
.setParent(
BranchName.ofProjectLocationDataStoreBranchName(
"[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]")
.toString())
.setFilter("filter-1274492040")
.setErrorConfig(PurgeErrorConfig.newBuilder().build())
.setForce(true)
.build();
client.purgeDocumentsAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
}
}
  // Verifies getProcessedDocument(DocumentName): mocked response returned verbatim,
  // one request, valid API-client header.
  @Test
  public void getProcessedDocumentTest() throws Exception {
    ProcessedDocument expectedResponse =
        ProcessedDocument.newBuilder()
            .setDocument(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .build();
    mockService.addResponse(expectedResponse);
    DocumentName name =
        DocumentName.ofProjectLocationDataStoreBranchDocumentName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]");
    ProcessedDocument actualResponse = client.getProcessedDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // getProcessedDocument(DocumentName) must propagate a primed INVALID_ARGUMENT
  // error as InvalidArgumentException.
  @Test
  public void getProcessedDocumentExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      DocumentName name =
          DocumentName.ofProjectLocationDataStoreBranchDocumentName(
              "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]");
      client.getProcessedDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // String-name overload of getProcessedDocumentTest using a raw resource path.
  @Test
  public void getProcessedDocumentTest2() throws Exception {
    ProcessedDocument expectedResponse =
        ProcessedDocument.newBuilder()
            .setDocument(
                DocumentName.ofProjectLocationDataStoreBranchDocumentName(
                        "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]", "[DOCUMENT]")
                    .toString())
            .build();
    mockService.addResponse(expectedResponse);
    String name =
        "projects/project-2446/locations/location-2446/dataStores/dataStore-2446/branches/branche-2446/documents/document-2446";
    ProcessedDocument actualResponse = client.getProcessedDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error-path twin of getProcessedDocumentTest2 (String-name overload).
  @Test
  public void getProcessedDocumentExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name =
          "projects/project-2446/locations/location-2446/dataStores/dataStore-2446/branches/branche-2446/documents/document-2446";
      client.getProcessedDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Verifies batchGetDocumentsMetadata(BranchName): mocked (empty) response returned
  // verbatim, one request, valid API-client header.
  @Test
  public void batchGetDocumentsMetadataTest() throws Exception {
    BatchGetDocumentsMetadataResponse expectedResponse =
        BatchGetDocumentsMetadataResponse.newBuilder()
            .addAllDocumentsMetadata(
                new ArrayList<BatchGetDocumentsMetadataResponse.DocumentMetadata>())
            .build();
    mockService.addResponse(expectedResponse);
    BranchName parent =
        BranchName.ofProjectLocationDataStoreBranchName(
            "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]");
    BatchGetDocumentsMetadataResponse actualResponse = client.batchGetDocumentsMetadata(parent);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // batchGetDocumentsMetadata(BranchName) must propagate a primed INVALID_ARGUMENT
  // error as InvalidArgumentException.
  @Test
  public void batchGetDocumentsMetadataExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      BranchName parent =
          BranchName.ofProjectLocationDataStoreBranchName(
              "[PROJECT]", "[LOCATION]", "[DATA_STORE]", "[BRANCH]");
      client.batchGetDocumentsMetadata(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // String-parent overload of batchGetDocumentsMetadataTest using a raw resource path.
  @Test
  public void batchGetDocumentsMetadataTest2() throws Exception {
    BatchGetDocumentsMetadataResponse expectedResponse =
        BatchGetDocumentsMetadataResponse.newBuilder()
            .addAllDocumentsMetadata(
                new ArrayList<BatchGetDocumentsMetadataResponse.DocumentMetadata>())
            .build();
    mockService.addResponse(expectedResponse);
    String parent =
        "projects/project-3187/locations/location-3187/dataStores/dataStore-3187/branches/branche-3187";
    BatchGetDocumentsMetadataResponse actualResponse = client.batchGetDocumentsMetadata(parent);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error-path twin of batchGetDocumentsMetadataTest2 (String-parent overload).
  @Test
  public void batchGetDocumentsMetadataExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String parent =
          "projects/project-3187/locations/location-3187/dataStores/dataStore-3187/branches/branche-3187";
      client.batchGetDocumentsMetadata(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
googleapis/google-cloud-java | 35,317 | java-gkehub/proto-google-cloud-gkehub-v1alpha/src/main/java/com/google/cloud/gkehub/servicemesh/v1alpha/MembershipState.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1alpha/servicemesh/servicemesh.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.servicemesh.v1alpha;
/**
*
*
* <pre>
* **Service Mesh**: State for a single Membership, as analyzed by the Service
* Mesh Hub Controller.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.servicemesh.v1alpha.MembershipState}
*/
public final class MembershipState extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.servicemesh.v1alpha.MembershipState)
MembershipStateOrBuilder {
private static final long serialVersionUID = 0L;
  // Use MembershipState.newBuilder() to construct.
  private MembershipState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default construction: starts with an empty analysis-message list.
  private MembershipState() {
    analysisMessages_ = java.util.Collections.emptyList();
  }
  // Generated hook; the protobuf runtime uses this to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MembershipState();
  }
  // Descriptor plumbing generated from servicemesh.proto; wires this class and its
  // Builder to the reflection tables in ServiceMeshProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto
        .internal_static_google_cloud_gkehub_servicemesh_v1alpha_MembershipState_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto
        .internal_static_google_cloud_gkehub_servicemesh_v1alpha_MembershipState_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.class,
            com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.Builder.class);
  }
  public static final int ANALYSIS_MESSAGES_FIELD_NUMBER = 1;
  // Backing list for the repeated analysis_messages field; immutable once built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>
      analysisMessages_;
  /**
   *
   *
   * <pre>
   * Output only. Results of running Service Mesh analyzers.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>
      getAnalysisMessagesList() {
    return analysisMessages_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Results of running Service Mesh analyzers.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>
      getAnalysisMessagesOrBuilderList() {
    return analysisMessages_;
  }
  /**
   *
   *
   * <pre>
   * Output only. Results of running Service Mesh analyzers.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public int getAnalysisMessagesCount() {
    return analysisMessages_.size();
  }
  /**
   *
   *
   * <pre>
   * Output only. Results of running Service Mesh analyzers.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage getAnalysisMessages(
      int index) {
    return analysisMessages_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Output only. Results of running Service Mesh analyzers.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder
      getAnalysisMessagesOrBuilder(int index) {
    return analysisMessages_.get(index);
  }
  // Tri-state cache: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes each analysis_messages element as field 1, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < analysisMessages_.size(); i++) {
      output.writeMessage(1, analysisMessages_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and memoizes it (memoizedSize == -1 means uncached).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < analysisMessages_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, analysisMessages_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over the analysis_messages list and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState other =
        (com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState) obj;
    if (!getAnalysisMessagesList().equals(other.getAnalysisMessagesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); memoized (0 means not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAnalysisMessagesCount() > 0) {
      hash = (37 * hash) + ANALYSIS_MESSAGES_FIELD_NUMBER;
      hash = (53 * hash) + getAnalysisMessagesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite; all delegate to PARSER / the shared helpers.
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: fresh builders come from the default instance; toBuilder()
  // pre-populates from this message (the default instance yields an empty builder).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* **Service Mesh**: State for a single Membership, as analyzed by the Service
* Mesh Hub Controller.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.servicemesh.v1alpha.MembershipState}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.servicemesh.v1alpha.MembershipState)
com.google.cloud.gkehub.servicemesh.v1alpha.MembershipStateOrBuilder {
    // Builder-side descriptor plumbing; mirrors the outer class's reflection wiring.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto
          .internal_static_google_cloud_gkehub_servicemesh_v1alpha_MembershipState_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto
          .internal_static_google_cloud_gkehub_servicemesh_v1alpha_MembershipState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.class,
              com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.Builder.class);
    }
    // Construct using com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets the builder: empties the list (or the nested field builder) and clears
    // the has-bit for analysis_messages.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (analysisMessagesBuilder_ == null) {
        analysisMessages_ = java.util.Collections.emptyList();
      } else {
        analysisMessages_ = null;
        analysisMessagesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    // Type identity accessors used by the protobuf reflection machinery.
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gkehub.servicemesh.v1alpha.ServiceMeshProto
          .internal_static_google_cloud_gkehub_servicemesh_v1alpha_MembershipState_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState getDefaultInstanceForType() {
      return com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.getDefaultInstance();
    }
    // build() enforces initialization; buildPartial() copies builder state into a new
    // message without the check, freezing the repeated field as an unmodifiable list.
    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState build() {
      com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState buildPartial() {
      com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState result =
          new com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState result) {
      if (analysisMessagesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze the locally-built list so the message instance is immutable.
          analysisMessages_ = java.util.Collections.unmodifiableList(analysisMessages_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.analysisMessages_ = analysisMessages_;
      } else {
        result.analysisMessages_ = analysisMessagesBuilder_.build();
      }
    }
    // No singular fields in this message; generated as an empty copy step.
    private void buildPartial0(com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState result) {
      int from_bitField0_ = bitField0_;
    }
    // Reflection-based mutators: generated pass-throughs to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Merge: type-dispatches to the strongly-typed overload below.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState) {
        return mergeFrom((com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Appends the other message's analysis_messages; when our side is empty it
    // aliases the other's (immutable) list instead of copying.
    public Builder mergeFrom(com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState other) {
      if (other == com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState.getDefaultInstance())
        return this;
      if (analysisMessagesBuilder_ == null) {
        if (!other.analysisMessages_.isEmpty()) {
          if (analysisMessages_.isEmpty()) {
            analysisMessages_ = other.analysisMessages_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAnalysisMessagesIsMutable();
            analysisMessages_.addAll(other.analysisMessages_);
          }
          onChanged();
        }
      } else {
        if (!other.analysisMessages_.isEmpty()) {
          if (analysisMessagesBuilder_.isEmpty()) {
            analysisMessagesBuilder_.dispose();
            analysisMessagesBuilder_ = null;
            analysisMessages_ = other.analysisMessages_;
            bitField0_ = (bitField0_ & ~0x00000001);
            analysisMessagesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAnalysisMessagesFieldBuilder()
                    : null;
          } else {
            analysisMessagesBuilder_.addAllMessages(other.analysisMessages_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so a builder is always buildable.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 (field 1, length-delimited) is an
    // AnalysisMessage; anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage m =
                    input.readMessage(
                        com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.parser(),
                        extensionRegistry);
                if (analysisMessagesBuilder_ == null) {
                  ensureAnalysisMessagesIsMutable();
                  analysisMessages_.add(m);
                } else {
                  analysisMessagesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0 of bitField0_ tracks whether analysisMessages_ is a private mutable copy.
    private int bitField0_;
    private java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>
        analysisMessages_ = java.util.Collections.emptyList();
    // Copy-on-write: clone the (possibly shared/immutable) list before first mutation.
    private void ensureAnalysisMessagesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        analysisMessages_ =
            new java.util.ArrayList<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>(
                analysisMessages_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created nested builder; when non-null it owns the field's state.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage,
            com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder,
            com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>
        analysisMessagesBuilder_;
    /**
     *
     *
     * <pre>
     * Output only. Results of running Service Mesh analyzers.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>
        getAnalysisMessagesList() {
      // Read path forks on whether the nested field builder owns the state.
      if (analysisMessagesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(analysisMessages_);
      } else {
        return analysisMessagesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Results of running Service Mesh analyzers.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public int getAnalysisMessagesCount() {
      if (analysisMessagesBuilder_ == null) {
        return analysisMessages_.size();
      } else {
        return analysisMessagesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. Results of running Service Mesh analyzers.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage getAnalysisMessages(
        int index) {
      if (analysisMessagesBuilder_ == null) {
        return analysisMessages_.get(index);
      } else {
        return analysisMessagesBuilder_.getMessage(index);
      }
    }
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder setAnalysisMessages(
    int index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage value) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.setMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.set(index, value);
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder setAnalysisMessages(
    int index,
    com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder builderForValue) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.setMessage(index, builderForValue.build());
    return this;
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.set(index, builderForValue.build());
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder addAnalysisMessages(
    com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage value) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.add(value);
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder addAnalysisMessages(
    int index, com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage value) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.add(index, value);
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder addAnalysisMessages(
    com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder builderForValue) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.addMessage(builderForValue.build());
    return this;
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.add(builderForValue.build());
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder addAnalysisMessages(
    int index,
    com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder builderForValue) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.addMessage(index, builderForValue.build());
    return this;
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.add(index, builderForValue.build());
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder addAllAnalysisMessages(
    java.lang.Iterable<? extends com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage>
        values) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.addAllMessages(values);
    return this;
  }
  ensureAnalysisMessagesIsMutable();
  com.google.protobuf.AbstractMessageLite.Builder.addAll(values, analysisMessages_);
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder clearAnalysisMessages() {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.clear();
    return this;
  }
  // Reset to the shared empty list and drop the has-been-set bit.
  analysisMessages_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public Builder removeAnalysisMessages(int index) {
  if (analysisMessagesBuilder_ != null) {
    analysisMessagesBuilder_.remove(index);
    return this;
  }
  ensureAnalysisMessagesIsMutable();
  analysisMessages_.remove(index);
  onChanged();
  return this;
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder
    getAnalysisMessagesBuilder(int index) {
  // Forces creation of the field builder, which hands out mutable per-element builders.
  final com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage,
          com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder,
          com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>
      fieldBuilder = getAnalysisMessagesFieldBuilder();
  return fieldBuilder.getBuilder(index);
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder
    getAnalysisMessagesOrBuilder(int index) {
  return analysisMessagesBuilder_ != null
      ? analysisMessagesBuilder_.getMessageOrBuilder(index)
      : analysisMessages_.get(index);
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public java.util.List<
        ? extends com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>
    getAnalysisMessagesOrBuilderList() {
  if (analysisMessagesBuilder_ == null) {
    return java.util.Collections.unmodifiableList(analysisMessages_);
  }
  return analysisMessagesBuilder_.getMessageOrBuilderList();
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder
    addAnalysisMessagesBuilder() {
  // Appends a builder seeded from the default (empty) message.
  final com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage emptyMessage =
      com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.getDefaultInstance();
  return getAnalysisMessagesFieldBuilder().addBuilder(emptyMessage);
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder
    addAnalysisMessagesBuilder(int index) {
  // Inserts a builder seeded from the default (empty) message at the given position.
  final com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage emptyMessage =
      com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.getDefaultInstance();
  return getAnalysisMessagesFieldBuilder().addBuilder(index, emptyMessage);
}
/**
 * Output only. Results of running Service Mesh analyzers.
 *
 * <code>
 * repeated .google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage analysis_messages = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
 * </code>
 */
public java.util.List<com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder>
    getAnalysisMessagesBuilderList() {
  final com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage,
          com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder,
          com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>
      fieldBuilder = getAnalysisMessagesFieldBuilder();
  return fieldBuilder.getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for analysis_messages. After construction the
// builder owns the elements, so the plain list reference is released (set to null) to make
// any accidental direct use of analysisMessages_ fail fast.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage,
com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder,
com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>
getAnalysisMessagesFieldBuilder() {
if (analysisMessagesBuilder_ == null) {
analysisMessagesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage,
com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessage.Builder,
com.google.cloud.gkehub.servicemesh.v1alpha.AnalysisMessageOrBuilder>(
analysisMessages_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
analysisMessages_ = null;
}
return analysisMessagesBuilder_;
}
// Replaces the unknown-field set wholesale; delegates to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges additional unknown fields into the existing set; delegates to the superclass.
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.servicemesh.v1alpha.MembershipState)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.servicemesh.v1alpha.MembershipState)
// Shared singleton default (all-fields-empty) instance of MembershipState.
private static final com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState();
}
// Returns the shared default instance.
public static com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that deserializes MembershipState messages from a CodedInputStream.
private static final com.google.protobuf.Parser<MembershipState> PARSER =
new com.google.protobuf.AbstractParser<MembershipState>() {
@java.lang.Override
public MembershipState parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach whatever was parsed so far so callers can inspect the partial message.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf-specific exception type, keeping the cause.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Accessor for the static parser instance.
public static com.google.protobuf.Parser<MembershipState> parser() {
return PARSER;
}
// Returns the parser used to deserialize this message type.
@java.lang.Override
public com.google.protobuf.Parser<MembershipState> getParserForType() {
return PARSER;
}
// Returns the shared default (empty) instance for this message type.
@java.lang.Override
public com.google.cloud.gkehub.servicemesh.v1alpha.MembershipState getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/commons-lang | 35,267 | src/test/java/org/apache/commons/lang3/RandomStringUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3;
import static org.apache.commons.lang3.LangAssertions.assertIllegalArgumentException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Random;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.jupiter.params.provider.ValueSource;
/**
* Tests {@link RandomStringUtils}.
*/
class RandomStringUtilsTest extends AbstractLangTest {
// Iteration count shared by the statistical sanity tests below.
private static final int LOOP_COUNT = 1_000;
/** Maximum safe value for count to avoid overflow: (21x + 3) / 5 + 10 < 0x0FFF_FFFF */
private static final int MAX_SAFE_COUNT = 63_913_201;
// Supplies the three built-in RandomStringUtils flavors to the parameterized tests.
static Stream<RandomStringUtils> randomProvider() {
return Stream.of(RandomStringUtils.secure(), RandomStringUtils.secureStrong(), RandomStringUtils.insecure());
}
/**
 * Computes the Chi-Square statistic for observed versus expected frequency counts.
 *
 * @param expected array of expected frequency counts
 * @param observed array of observed frequency counts
 * @return the Chi-Square statistic
 */
private double chiSquare(final int[] expected, final int[] observed) {
    double statistic = 0.0d;
    int bucket = 0;
    while (bucket < observed.length) {
        final double deviation = observed[bucket] - expected[bucket];
        statistic += deviation * deviation / expected[bucket];
        bucket++;
    }
    return statistic;
}
/**
 * Test for LANG-1286. Creates situation where old code would overflow a char and result in a code point outside the specified range.
 */
@Test
void testCharOverflow() {
    final int start = Character.MAX_VALUE;
    final int end = Integer.MAX_VALUE;
    @SuppressWarnings("serial")
    final Random skewedRandom = new Random() {
        @Override
        public int nextInt(final int n) {
            // Prevents selection of 'start' as the character
            return super.nextInt(n - 1) + 1;
        }
    };
    final String result = RandomStringUtils.random(2, start, end, false, false, null, skewedRandom);
    final int codePoint = result.codePointAt(0);
    final boolean inRange = codePoint >= start && codePoint < end;
    assertTrue(inRange, String.format("Character '%d' not in range [%d,%d).", codePoint, start, end));
}
// The deprecated public constructor must remain instantiable for binary compatibility.
@Test
void testConstructor() {
    final RandomStringUtils instance = new RandomStringUtils();
    assertNotNull(instance);
}
// Every static factory must reject a negative length, an empty character source,
// or a [start,end) range that cannot satisfy the letters/numbers flags.
@Test
void testExceptionsRandom() {
assertIllegalArgumentException(() -> RandomStringUtils.random(-1));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, true, true));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, new char[] { 'a' }));
assertIllegalArgumentException(() -> RandomStringUtils.random(1, new char[0]));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, ""));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, (String) null));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, 'a', 'z', false, false));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, 'a', 'z', false, false, new char[] { 'a' }));
assertIllegalArgumentException(() -> RandomStringUtils.random(-1, 'a', 'z', false, false, new char[] { 'a' }, new Random()));
assertIllegalArgumentException(() -> RandomStringUtils.random(8, 32, 48, false, true));
assertIllegalArgumentException(() -> RandomStringUtils.random(8, 32, 65, true, false));
assertIllegalArgumentException(() -> RandomStringUtils.random(1, Integer.MIN_VALUE, -10, false, false, null));
}
// Same contract checks as above, but exercised through each instance flavor
// (secure / secureStrong / insecure) via the next* instance methods.
@ParameterizedTest
@MethodSource("randomProvider")
void testExceptionsRandom(final RandomStringUtils rsu) {
assertIllegalArgumentException(() -> rsu.next(-1));
assertIllegalArgumentException(() -> rsu.next(-1, true, true));
assertIllegalArgumentException(() -> rsu.next(-1, new char[] { 'a' }));
assertIllegalArgumentException(() -> rsu.next(1, new char[0]));
assertIllegalArgumentException(() -> rsu.next(-1, ""));
assertIllegalArgumentException(() -> rsu.next(-1, (String) null));
assertIllegalArgumentException(() -> rsu.next(-1, 'a', 'z', false, false));
assertIllegalArgumentException(() -> rsu.next(-1, 'a', 'z', false, false, new char[] { 'a' }));
assertIllegalArgumentException(() -> rsu.next(8, 32, 48, false, true));
assertIllegalArgumentException(() -> rsu.next(8, 32, 65, true, false));
assertIllegalArgumentException(() -> rsu.next(1, Integer.MIN_VALUE, -10, false, false, null));
}
// Each character-class factory (alphabetic/ascii/graph/numeric/print) must reject a
// negative length, both via the static API and via every instance flavor.
@Test
void testExceptionsRandomAlphabetic() {
assertIllegalArgumentException(() -> RandomStringUtils.randomAlphabetic(-1));
}
@ParameterizedTest
@MethodSource("randomProvider")
void testExceptionsRandomAlphabetic(final RandomStringUtils rsu) {
assertIllegalArgumentException(() -> rsu.nextAlphabetic(-1));
}
@Test
void testExceptionsRandomAscii() {
assertIllegalArgumentException(() -> RandomStringUtils.randomAscii(-1));
}
@ParameterizedTest
@MethodSource("randomProvider")
void testExceptionsRandomAscii(final RandomStringUtils rsu) {
assertIllegalArgumentException(() -> rsu.nextAscii(-1));
}
@Test
void testExceptionsRandomGraph() {
assertIllegalArgumentException(() -> RandomStringUtils.randomGraph(-1));
}
@ParameterizedTest
@MethodSource("randomProvider")
void testExceptionsRandomGraph(final RandomStringUtils rsu) {
assertIllegalArgumentException(() -> rsu.nextGraph(-1));
}
@Test
void testExceptionsRandomNumeric() {
assertIllegalArgumentException(() -> RandomStringUtils.randomNumeric(-1));
}
@ParameterizedTest
@MethodSource("randomProvider")
void testExceptionsRandomNumeric(final RandomStringUtils rsu) {
assertIllegalArgumentException(() -> rsu.nextNumeric(-1));
}
@Test
void testExceptionsRandomPrint() {
assertIllegalArgumentException(() -> RandomStringUtils.randomPrint(-1));
}
@ParameterizedTest
@MethodSource("randomProvider")
void testExceptionsRandomPrint(final RandomStringUtils rsu) {
assertIllegalArgumentException(() -> rsu.nextPrint(-1));
}
/**
 * Test homogeneity of random strings generated -- i.e., test that characters show up with expected frequencies in generated strings. Will fail randomly
 * about 1 in 100,000 times. Repeated failures indicate a problem.
 *
 * @param rsu the instance to test.
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testHomogeneity(final RandomStringUtils rsu) {
    final String set = "abc";
    final char[] chars = set.toCharArray();
    final int[] counts = { 0, 0, 0 };
    final int[] expected = { 200, 200, 200 };
    for (int i = 0; i < 100; i++) {
        final String gen = rsu.next(6, chars);
        for (int j = 0; j < 6; j++) {
            // Map each generated character back to its bucket in the source set.
            final int bucket = set.indexOf(gen.charAt(j));
            if (bucket < 0) {
                fail("generated character not in set");
            }
            counts[bucket]++;
        }
    }
    // Perform chi-square test with degrees of freedom = 3-1 = 2, testing at 1e-5 level.
    // This expects a failure rate of 1 in 100,000.
    // critical value: from scipy.stats import chi2; chi2(2).isf(1e-5)
    assertTrue(chiSquare(expected, counts) < 23.025850929940457d, "test homogeneity -- will fail about 1 in 100,000 times");
}
// Generates strings near the documented overflow boundary; needs a large heap, so it is
// opt-in via -Dtest.large.heap=true.
@ParameterizedTest
@ValueSource(ints = {MAX_SAFE_COUNT, MAX_SAFE_COUNT + 1})
@EnabledIfSystemProperty(named = "test.large.heap", matches = "true")
void testHugeStrings(final int expectedLength) {
final String hugeString = RandomStringUtils.random(expectedLength);
assertEquals(expectedLength, hugeString.length(), "hugeString.length() == expectedLength");
}
/**
 * Checks if the string got by {@link RandomStringUtils#random(int)} can be converted to UTF-8 and back without loss.
 *
 * @see <a href="https://issues.apache.org/jira/browse/LANG-100">LANG-100</a>
 */
@Test
void testLang100() {
final int size = 5000;
final Charset charset = StandardCharsets.UTF_8;
final String orig = RandomStringUtils.random(size);
final byte[] bytes = orig.getBytes(charset);
final String copy = new String(bytes, charset);
// for a verbose compare:
for (int i = 0; i < orig.length() && i < copy.length(); i++) {
final char o = orig.charAt(i);
final char c = copy.charAt(i);
assertEquals(o, c, "differs at " + i + "(" + Integer.toHexString(Character.valueOf(o).hashCode()) + ","
+ Integer.toHexString(Character.valueOf(c).hashCode()) + ")");
}
// compare length also
assertEquals(orig.length(), copy.length());
// just to be complete
assertEquals(orig, copy);
}
/**
 * Checks if the string got by {@link RandomStringUtils#random(int)} can be converted to UTF-8 and back without loss.
 *
 * @param rsu the instance to test
 * @see <a href="https://issues.apache.org/jira/browse/LANG-100">LANG-100</a>
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testLang100(final RandomStringUtils rsu) {
final int size = 5000;
final Charset charset = StandardCharsets.UTF_8;
final String orig = rsu.next(size);
final byte[] bytes = orig.getBytes(charset);
final String copy = new String(bytes, charset);
// for a verbose compare:
for (int i = 0; i < orig.length() && i < copy.length(); i++) {
final char o = orig.charAt(i);
final char c = copy.charAt(i);
assertEquals(o, c, "differs at " + i + "(" + Integer.toHexString(Character.valueOf(o).hashCode()) + ","
+ Integer.toHexString(Character.valueOf(c).hashCode()) + ")");
}
// compare length also
assertEquals(orig.length(), copy.length());
// just to be complete
assertEquals(orig, copy);
}
// LANG-805: a single-char candidate set with a degenerate (0,0) range must still
// produce output, regardless of the random seed.
@Test
void testLANG805() {
final long seedMillis = System.currentTimeMillis();
assertEquals("aaa", RandomStringUtils.random(3, 0, 0, false, false, new char[] { 'a' }, new Random(seedMillis)));
}
// LANG-807: an invalid range (start == end with letters/numbers both off) must raise an
// IllegalArgumentException whose message names both offending parameters.
@ParameterizedTest
@MethodSource("randomProvider")
void testLANG807(final RandomStringUtils rsu) {
final IllegalArgumentException ex = assertIllegalArgumentException(() -> rsu.next(3, 5, 5, false, false));
final String msg = ex.getMessage();
assertTrue(msg.contains("start"), "Message (" + msg + ") must contain 'start'");
assertTrue(msg.contains("end"), "Message (" + msg + ") must contain 'end'");
}
/**
 * Test {@code RandomStringUtils.random} works appropriately when letters=true
 * and the range does not only include ASCII letters.
 * Fails with probability less than 2^-40 (in practice this never happens).
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testNonASCIILetters(final RandomStringUtils rsu) {
// Check that the following create a string with 10 characters 0x4e00 (a non-ASCII letter)
String r1 = rsu.next(10, 0x4e00, 0x4e01, true, false);
assertEquals(10, r1.length(), "wrong length");
for (int i = 0; i < r1.length(); i++) {
assertEquals(0x4e00, r1.charAt(i), "characters not all equal to 0x4e00");
}
// Same with both letters=true and numbers=true
r1 = rsu.next(10, 0x4e00, 0x4e01, true, true);
assertEquals(10, r1.length(), "wrong length");
for (int i = 0; i < r1.length(); i++) {
assertEquals(0x4e00, r1.charAt(i), "characters not all equal to 0x4e00");
}
// Check that at least one letter is not ASCII
boolean found = false;
r1 = rsu.next(40, 'F', 0x3000, true, false);
assertEquals(40, r1.length(), "wrong length");
for (int i = 0; i < r1.length(); i++) {
assertTrue(Character.isLetter(r1.charAt(i)), "characters not all letters");
if (r1.charAt(i) > 0x7f) {
found = true;
}
}
assertTrue(found, "no non-ASCII letter generated");
}
/**
 * Test {@code RandomStringUtils.random} works appropriately when numbers=true
 * and the range does not only include ASCII numbers/digits.
 * Fails with probability less than 2^-40 (in practice this never happens).
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testNonASCIINumbers(final RandomStringUtils rsu) {
// Check that the following create a string with 10 characters 0x0660 (a non-ASCII digit)
String r1 = rsu.next(10, 0x0660, 0x0661, false, true);
assertEquals(10, r1.length(), "wrong length");
for (int i = 0; i < r1.length(); i++) {
assertEquals(0x0660, r1.charAt(i), "characters not all equal to 0x0660");
}
// Same with both letters=true and numbers=true
r1 = rsu.next(10, 0x0660, 0x0661, true, true);
assertEquals(10, r1.length(), "wrong length");
for (int i = 0; i < r1.length(); i++) {
assertEquals(0x0660, r1.charAt(i), "characters not all equal to 0x0660");
}
// Check that at least one digit is not ASCII
boolean found = false;
r1 = rsu.next(40, 'F', 0x3000, false, true);
assertEquals(40, r1.length(), "wrong length");
for (int i = 0; i < r1.length(); i++) {
assertTrue(Character.isDigit(r1.charAt(i)), "characters not all numbers");
if (r1.charAt(i) > 0x7f) {
found = true;
}
}
assertTrue(found, "no non-ASCII number generated");
}
/**
 * Make sure boundary alpha characters are generated by randomAlphabetic. This test will fail randomly with probability = 4 * (51/52)**1000 ~ 1.58E-8
 */
@Test
void testRandomAlphabetic() {
    final char[] testChars = { 'a', 'z', 'A', 'Z' };
    final boolean[] found = { false, false, false, false };
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String randString = RandomStringUtils.randomAlphabetic(10);
        for (int j = 0; j < testChars.length; j++) {
            // indexOf returns -1 when absent and 0 for a hit at the first position;
            // the previous "> 0" comparison silently discarded matches at index 0.
            if (randString.indexOf(testChars[j]) >= 0) {
                found[j] = true;
            }
        }
    }
    for (int i = 0; i < testChars.length; i++) {
        // Message corrected: this test exercises alphabetic, not alphanumeric, generation.
        assertTrue(found[i], "alphabetic character not generated in 1000 attempts: " + testChars[i] + " -- repeated failures indicate a problem ");
    }
}
/**
 * Make sure boundary alpha characters are generated by randomAlphabetic. This test will fail randomly with probability = 4 * (51/52)**1000 ~ 1.58E-8
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomAlphabetic(final RandomStringUtils rsu) {
    final char[] testChars = { 'a', 'z', 'A', 'Z' };
    final boolean[] found = { false, false, false, false };
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String randString = rsu.nextAlphabetic(10);
        for (int j = 0; j < testChars.length; j++) {
            // >= 0, not > 0: a match at index 0 still counts as generated.
            if (randString.indexOf(testChars[j]) >= 0) {
                found[j] = true;
            }
        }
    }
    for (int i = 0; i < testChars.length; i++) {
        assertTrue(found[i], "alphabetic character not generated in 1000 attempts: " + testChars[i] + " -- repeated failures indicate a problem ");
    }
}
// Verifies randomAlphabetic(min, max) stays within [min, max) and, over many samples,
// actually attains both the minimum and maximum lengths (statistical; rare flakes possible).
@Test
void testRandomAlphabeticRange() {
final int expectedMinLengthInclusive = 1;
final int expectedMaxLengthExclusive = 11;
final String pattern = "^\\p{Alpha}{" + expectedMinLengthInclusive + ',' + expectedMaxLengthExclusive + "}$";
int maxCreatedLength = expectedMinLengthInclusive;
int minCreatedLength = expectedMaxLengthExclusive - 1;
for (int i = 0; i < LOOP_COUNT; i++) {
final String s = RandomStringUtils.randomAlphabetic(expectedMinLengthInclusive, expectedMaxLengthExclusive);
assertTrue(s.length() >= expectedMinLengthInclusive, "within range");
assertTrue(s.length() <= expectedMaxLengthExclusive - 1, "within range");
assertTrue(s.matches(pattern), s);
if (s.length() < minCreatedLength) {
minCreatedLength = s.length();
}
if (s.length() > maxCreatedLength) {
maxCreatedLength = s.length();
}
}
assertEquals(expectedMinLengthInclusive, minCreatedLength, "min generated, may fail randomly rarely");
assertEquals(expectedMaxLengthExclusive - 1, maxCreatedLength, "max generated, may fail randomly rarely");
}
// Same range contract exercised through each instance flavor's nextAlphabetic.
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomAlphabeticRange(final RandomStringUtils rsu) {
final int expectedMinLengthInclusive = 1;
final int expectedMaxLengthExclusive = 11;
final String pattern = "^\\p{Alpha}{" + expectedMinLengthInclusive + ',' + expectedMaxLengthExclusive + "}$";
int maxCreatedLength = expectedMinLengthInclusive;
int minCreatedLength = expectedMaxLengthExclusive - 1;
for (int i = 0; i < LOOP_COUNT; i++) {
final String s = rsu.nextAlphabetic(expectedMinLengthInclusive, expectedMaxLengthExclusive);
assertTrue(s.length() >= expectedMinLengthInclusive, "within range");
assertTrue(s.length() <= expectedMaxLengthExclusive - 1, "within range");
assertTrue(s.matches(pattern), s);
if (s.length() < minCreatedLength) {
minCreatedLength = s.length();
}
if (s.length() > maxCreatedLength) {
maxCreatedLength = s.length();
}
}
assertEquals(expectedMinLengthInclusive, minCreatedLength, "min generated, may fail randomly rarely");
assertEquals(expectedMaxLengthExclusive - 1, maxCreatedLength, "max generated, may fail randomly rarely");
}
/**
 * Make sure boundary alphanumeric characters are generated by randomAlphaNumeric. This test will fail randomly with probability = 6 * (61/62)**1000 ~ 5.2E-7
 */
@Test
void testRandomAlphaNumeric() {
    final char[] testChars = { 'a', 'z', 'A', 'Z', '0', '9' };
    final boolean[] found = { false, false, false, false, false, false };
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String randString = RandomStringUtils.randomAlphanumeric(10);
        for (int j = 0; j < testChars.length; j++) {
            // indexOf returns -1 when absent and 0 for a hit at the first position;
            // the previous "> 0" comparison silently discarded matches at index 0.
            if (randString.indexOf(testChars[j]) >= 0) {
                found[j] = true;
            }
        }
    }
    for (int i = 0; i < testChars.length; i++) {
        assertTrue(found[i], "alphanumeric character not generated in 1000 attempts: " + testChars[i] + " -- repeated failures indicate a problem ");
    }
}
/**
 * Make sure boundary alphanumeric characters are generated by randomAlphaNumeric. This test will fail randomly with probability = 6 * (61/62)**1000 ~ 5.2E-7
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomAlphaNumeric(final RandomStringUtils rsu) {
    final char[] testChars = { 'a', 'z', 'A', 'Z', '0', '9' };
    final boolean[] found = { false, false, false, false, false, false };
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String randString = rsu.nextAlphanumeric(10);
        for (int j = 0; j < testChars.length; j++) {
            // >= 0, not > 0: a match at index 0 still counts as generated.
            if (randString.indexOf(testChars[j]) >= 0) {
                found[j] = true;
            }
        }
    }
    for (int i = 0; i < testChars.length; i++) {
        assertTrue(found[i], "alphanumeric character not generated in 1000 attempts: " + testChars[i] + " -- repeated failures indicate a problem ");
    }
}
// Verifies randomAlphanumeric(min, max) stays within [min, max) and, over many samples,
// attains both the minimum and maximum lengths (statistical; rare flakes possible).
@Test
void testRandomAlphanumericRange() {
final int expectedMinLengthInclusive = 1;
final int expectedMaxLengthExclusive = 11;
final String pattern = "^\\p{Alnum}{" + expectedMinLengthInclusive + ',' + expectedMaxLengthExclusive + "}$";
int maxCreatedLength = expectedMinLengthInclusive;
int minCreatedLength = expectedMaxLengthExclusive - 1;
for (int i = 0; i < LOOP_COUNT; i++) {
final String s = RandomStringUtils.randomAlphanumeric(expectedMinLengthInclusive, expectedMaxLengthExclusive);
assertTrue(s.length() >= expectedMinLengthInclusive, "within range");
assertTrue(s.length() <= expectedMaxLengthExclusive - 1, "within range");
assertTrue(s.matches(pattern), s);
if (s.length() < minCreatedLength) {
minCreatedLength = s.length();
}
if (s.length() > maxCreatedLength) {
maxCreatedLength = s.length();
}
}
assertEquals(expectedMinLengthInclusive, minCreatedLength, "min generated, may fail randomly rarely");
assertEquals(expectedMaxLengthExclusive - 1, maxCreatedLength, "max generated, may fail randomly rarely");
}
/**
 * Test the implementation
 *
 * @param rsu the instance to test.
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomApis(final RandomStringUtils rsu) {
    // next(count): correct length, and two independent calls differ.
    String r1 = rsu.next(50);
    assertEquals(50, r1.length(), "random(50) length");
    String r2 = rsu.next(50);
    assertEquals(50, r2.length(), "random(50) length");
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // nextAscii: every char in the printable ASCII range.
    r1 = rsu.nextAscii(50);
    assertEquals(50, r1.length(), "randomAscii(50) length");
    for (int i = 0; i < r1.length(); i++) {
        final int ch = r1.charAt(i);
        assertTrue(ch >= 32, "char >= 32");
        assertTrue(ch <= 127, "char <= 127");
    }
    r2 = rsu.nextAscii(50);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // nextAlphabetic: letters only, no digits.
    r1 = rsu.nextAlphabetic(50);
    assertEquals(50, r1.length(), "randomAlphabetic(50)");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(Character.isLetter(r1.charAt(i)) && !Character.isDigit(r1.charAt(i)), "r1 contains alphabetic");
    }
    r2 = rsu.nextAlphabetic(50);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // nextAlphanumeric: letters or digits.
    r1 = rsu.nextAlphanumeric(50);
    assertEquals(50, r1.length(), "randomAlphanumeric(50)");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(Character.isLetterOrDigit(r1.charAt(i)), "r1 contains alphanumeric");
    }
    // Fix: compare against a second *alphanumeric* string. The original used
    // nextAlphabetic here (copy-paste slip), weakening this check.
    r2 = rsu.nextAlphanumeric(50);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // nextGraph: visible characters, 33..126.
    r1 = rsu.nextGraph(50);
    assertEquals(50, r1.length(), "randomGraph(50) length");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(r1.charAt(i) >= 33 && r1.charAt(i) <= 126, "char between 33 and 126");
    }
    r2 = rsu.nextGraph(50);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // nextNumeric: digits only.
    r1 = rsu.nextNumeric(50);
    assertEquals(50, r1.length(), "randomNumeric(50)");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(Character.isDigit(r1.charAt(i)) && !Character.isLetter(r1.charAt(i)), "r1 contains numeric");
    }
    r2 = rsu.nextNumeric(50);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // nextPrint: printable characters including space, 32..126.
    r1 = rsu.nextPrint(50);
    assertEquals(50, r1.length(), "randomPrint(50) length");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(r1.charAt(i) >= 32 && r1.charAt(i) <= 126, "char between 32 and 126");
    }
    r2 = rsu.nextPrint(50);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // next(count, String): characters restricted to the given set.
    String set = "abcdefg";
    r1 = rsu.next(50, set);
    assertEquals(50, r1.length(), "random(50, \"abcdefg\")");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(set.indexOf(r1.charAt(i)) > -1, "random char in set");
    }
    r2 = rsu.next(50, set);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // A null set means "all characters".
    r1 = rsu.next(50, (String) null);
    assertEquals(50, r1.length(), "random(50) length");
    r2 = rsu.next(50, (String) null);
    assertEquals(50, r2.length(), "random(50) length");
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // next(count, char[]): same restriction via a char array.
    set = "stuvwxyz";
    r1 = rsu.next(50, set.toCharArray());
    assertEquals(50, r1.length(), "random(50, \"stuvwxyz\")");
    for (int i = 0; i < r1.length(); i++) {
        assertTrue(set.indexOf(r1.charAt(i)) > -1, "random char in set");
    }
    r2 = rsu.next(50, set);
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    r1 = rsu.next(50, (char[]) null);
    assertEquals(50, r1.length(), "random(50) length");
    r2 = rsu.next(50, (char[]) null);
    assertEquals(50, r2.length(), "random(50) length");
    assertNotEquals(r1, r2, "!r1.equals(r2)");
    // Zero length yields the empty string.
    r1 = rsu.next(0);
    assertEquals("", r1, "random(0).equals(\"\")");
}
/**
 * Make sure 32 and 126 are generated by randomAscii This test will fail randomly with probability = 2*(95/96)**1000 ~ 5.7E-5
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomAscii(final RandomStringUtils rsu) {
    // Boundary characters of the generated range (the chars tested are 32 and
    // 126; the old javadoc said "32 and 127", which did not match the code).
    final char[] testChars = { (char) 32, (char) 126 };
    final boolean[] found = { false, false };
    // Test failures have been observed on GitHub builds with a 100 limit.
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String randString = rsu.nextAscii(10);
        for (int j = 0; j < testChars.length; j++) {
            // Fix: use >= 0 so an occurrence at index 0 counts.
            if (randString.indexOf(testChars[j]) >= 0) {
                found[j] = true;
            }
        }
    }
    for (int i = 0; i < testChars.length; i++) {
        assertTrue(found[i], "ascii character not generated in 1000 attempts: " + (int) testChars[i] + " -- repeated failures indicate a problem");
    }
}
/**
 * Checks that {@code nextAscii(min, max)} produces only ASCII strings and
 * that the generated lengths cover the whole requested range.
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomAsciiRange(final RandomStringUtils rsu) {
    final int minInclusive = 1;
    final int maxExclusive = 11;
    final String pattern = "^\\p{ASCII}{" + minInclusive + ',' + maxExclusive + "}$";
    // Track the extremes actually generated; start with the worst case.
    int longestSeen = minInclusive;
    int shortestSeen = maxExclusive - 1;
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String s = rsu.nextAscii(minInclusive, maxExclusive);
        assertTrue(s.length() >= minInclusive, "within range");
        assertTrue(s.length() <= maxExclusive - 1, "within range");
        assertTrue(s.matches(pattern), s);
        shortestSeen = Math.min(shortestSeen, s.length());
        longestSeen = Math.max(longestSeen, s.length());
    }
    assertEquals(minInclusive, shortestSeen, "min generated, may fail randomly rarely");
    assertEquals(maxExclusive - 1, longestSeen, "max generated, may fail randomly rarely");
}
/**
 * Checks that {@code nextGraph(min, max)} produces only visible (graph)
 * characters and that the generated lengths cover the whole requested range.
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomGraphRange(final RandomStringUtils rsu) {
    final int minInclusive = 1;
    final int maxExclusive = 11;
    final String pattern = "^\\p{Graph}{" + minInclusive + ',' + maxExclusive + "}$";
    // Track the extremes actually generated; start with the worst case.
    int longestSeen = minInclusive;
    int shortestSeen = maxExclusive - 1;
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String s = rsu.nextGraph(minInclusive, maxExclusive);
        assertTrue(s.length() >= minInclusive, "within range");
        assertTrue(s.length() <= maxExclusive - 1, "within range");
        assertTrue(s.matches(pattern), s);
        shortestSeen = Math.min(shortestSeen, s.length());
        longestSeen = Math.max(longestSeen, s.length());
    }
    assertEquals(minInclusive, shortestSeen, "min generated, may fail randomly rarely");
    assertEquals(maxExclusive - 1, longestSeen, "max generated, may fail randomly rarely");
}
/**
 * Make sure '0' and '9' are generated by randomNumeric This test will fail randomly with probability = 2 * (9/10)**1000 ~ 3.5E-46
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomNumeric(final RandomStringUtils rsu) {
    // Boundary digits of the numeric range.
    final char[] testChars = { '0', '9' };
    final boolean[] found = { false, false };
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String randString = rsu.nextNumeric(10);
        for (int j = 0; j < testChars.length; j++) {
            // Fix: use >= 0 so an occurrence at index 0 counts.
            if (randString.indexOf(testChars[j]) >= 0) {
                found[j] = true;
            }
        }
    }
    for (int i = 0; i < testChars.length; i++) {
        assertTrue(found[i], "digit not generated in 1000 attempts: " + testChars[i] + " -- repeated failures indicate a problem ");
    }
}
/**
 * Checks that {@code nextNumeric(min, max)} produces only digit strings and
 * that the generated lengths cover the whole requested range.
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomNumericRange(final RandomStringUtils rsu) {
    final int minInclusive = 1;
    final int maxExclusive = 11;
    final String pattern = "^\\p{Digit}{" + minInclusive + ',' + maxExclusive + "}$";
    // Track the extremes actually generated; start with the worst case.
    int longestSeen = minInclusive;
    int shortestSeen = maxExclusive - 1;
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String s = rsu.nextNumeric(minInclusive, maxExclusive);
        assertTrue(s.length() >= minInclusive, "within range");
        assertTrue(s.length() <= maxExclusive - 1, "within range");
        assertTrue(s.matches(pattern), s);
        shortestSeen = Math.min(shortestSeen, s.length());
        longestSeen = Math.max(longestSeen, s.length());
    }
    assertEquals(minInclusive, shortestSeen, "min generated, may fail randomly rarely");
    assertEquals(maxExclusive - 1, longestSeen, "max generated, may fail randomly rarely");
}
/** Two generators seeded with the same value must produce identical output. */
@Test
void testRandomParameter() {
    final long seedMillis = System.currentTimeMillis();
    final Random rngA = new Random(seedMillis);
    final Random rngB = new Random(seedMillis);
    final String r1 = RandomStringUtils.random(50, 0, 0, true, true, null, rngA);
    final String r2 = RandomStringUtils.random(50, 0, 0, true, true, null, rngB);
    assertEquals(r1, r2, "r1.equals(r2)");
}
/**
 * Checks that {@code nextPrint(min, max)} produces only printable characters
 * and that the generated lengths cover the whole requested range.
 *
 * @param rsu the instance to test
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomPrintRange(final RandomStringUtils rsu) {
    final int minInclusive = 1;
    final int maxExclusive = 11;
    final String pattern = "^\\p{Print}{" + minInclusive + ',' + maxExclusive + "}$";
    // Track the extremes actually generated; start with the worst case.
    int longestSeen = minInclusive;
    int shortestSeen = maxExclusive - 1;
    for (int i = 0; i < LOOP_COUNT; i++) {
        final String s = rsu.nextPrint(minInclusive, maxExclusive);
        assertTrue(s.length() >= minInclusive, "within range");
        assertTrue(s.length() <= maxExclusive - 1, "within range");
        assertTrue(s.matches(pattern), s);
        shortestSeen = Math.min(shortestSeen, s.length());
        longestSeen = Math.max(longestSeen, s.length());
    }
    assertEquals(minInclusive, shortestSeen, "min generated, may fail randomly rarely");
    assertEquals(maxExclusive - 1, longestSeen, "max generated, may fail randomly rarely");
}
/**
 * Test {@code RandomStringUtils.random} works appropriately when chars specified.
 *
 * @param rsu the instance to test.
 */
@ParameterizedTest
@MethodSource("randomProvider")
void testRandomWithChars(final RandomStringUtils rsu) {
    final char[] digitChars = "0123456789".toCharArray();
    final String r1 = rsu.next(50, 0, 0, true, true, digitChars);
    assertEquals(50, r1.length(), "randomNumeric(50)");
    for (final char ch : r1.toCharArray()) {
        assertTrue(Character.isDigit(ch) && !Character.isLetter(ch), "r1 contains numeric");
    }
    // Different invocations must (with overwhelming probability) differ.
    final String r2 = rsu.nextNumeric(50);
    assertNotEquals(r1, r2);
    final String r3 = rsu.next(50, 0, 0, true, true, digitChars);
    assertNotEquals(r1, r3);
    assertNotEquals(r2, r3);
}
}
|
apache/jena | 35,196 | jena-fuseki2/jena-fuseki-main/src/main/java/org/apache/jena/fuseki/main/cmds/FusekiMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.main.cmds;
import static arq.cmdline.ModAssembler.assemblerDescDecl;
import static org.apache.jena.fuseki.main.cmds.SetupType.*;
import java.net.BindException;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.stream.Stream;
import arq.cmdline.CmdARQ;
import arq.cmdline.ModDatasetAssembler;
import org.apache.jena.assembler.exceptions.AssemblerException;
import org.apache.jena.atlas.io.IOX;
import org.apache.jena.atlas.lib.FileOps;
import org.apache.jena.atlas.web.AuthScheme;
import org.apache.jena.cmd.*;
import org.apache.jena.fuseki.Fuseki;
import org.apache.jena.fuseki.FusekiException;
import org.apache.jena.fuseki.main.FusekiServer;
import org.apache.jena.fuseki.main.sys.FusekiModule;
import org.apache.jena.fuseki.main.sys.FusekiModules;
import org.apache.jena.fuseki.main.sys.FusekiServerArgsCustomiser;
import org.apache.jena.fuseki.main.sys.InitFusekiMain;
import org.apache.jena.fuseki.server.DataAccessPoint;
import org.apache.jena.fuseki.server.DataAccessPointRegistry;
import org.apache.jena.fuseki.server.FusekiCoreInfo;
import org.apache.jena.fuseki.servlets.SPARQL_QueryGeneral;
import org.apache.jena.fuseki.validation.*;
import org.apache.jena.query.ARQ;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RDFParser;
import org.apache.jena.sparql.core.assembler.AssemblerUtils;
import org.apache.jena.sys.JenaSystem;
import org.slf4j.Logger;
public class FusekiMain extends CmdARQ {
/** Default HTTP port when running from the command line. */
// NOTE(review): non-final, presumably so embedders/tests can override the default -- confirm.
public static int defaultPort = 3030;
/** Default HTTPS port when running from the command line. */
public static int defaultHttpsPort = 3043;
/**
 * Build, but do not start, a server based on command line syntax.
 */
public static FusekiServer build(String... args) {
    return builder(args).build();
}
/**
 * Create a server and run, within the same JVM.
 * This is the command line entry point.
 * This function does not return.
 * See also {@link #build} to create and return a server.
 */
public static void run(String... argv) {
    // Initialise Jena first, then the Fuseki main subsystem.
    JenaSystem.init();
    InitFusekiMain.init();
    FusekiMain fusekiCmd = new FusekiMain(argv);
    fusekiCmd.mainRun();
}
// Command line argument declarations.
// -- Dataset setup arguments.
private static ArgDecl argMem = new ArgDecl(ArgDecl.NoValue, "mem");
private static ArgDecl argUpdate = new ArgDecl(ArgDecl.NoValue, "update", "allowUpdate");
private static ArgDecl argFile = new ArgDecl(ArgDecl.HasValue, "file");
private static ArgDecl argTDB1mode = new ArgDecl(ArgDecl.NoValue, "tdb1");
private static ArgDecl argTDB2mode = new ArgDecl(ArgDecl.NoValue, "tdb2");
private static ArgDecl argMemTDB = new ArgDecl(ArgDecl.NoValue, "memtdb", "memTDB", "tdbmem");
private static ArgDecl argTDB = new ArgDecl(ArgDecl.HasValue, "loc", "location", "tdb");
// RDFS vocabulary applied to command line defined dataset.
private static ArgDecl argRDFS = new ArgDecl(ArgDecl.HasValue, "rdfs");
// No SPARQL dataset or services
private static ArgDecl argEmpty = new ArgDecl(ArgDecl.NoValue, "empty", "no-dataset");
private static ArgDecl argGeneralQuerySvc = new ArgDecl(ArgDecl.HasValue, "general");
// -- Server setup arguments.
private static ArgDecl argPort = new ArgDecl(ArgDecl.HasValue, "port");
private static ArgDecl argLocalhost = new ArgDecl(ArgDecl.NoValue, "localhost", "local");
private static ArgDecl argTimeout = new ArgDecl(ArgDecl.HasValue, "timeout");
private static ArgDecl argConfig = new ArgDecl(ArgDecl.HasValue, "config", "conf");
private static ArgDecl argJettyConfig = new ArgDecl(ArgDecl.HasValue, "jetty-config", "jetty");
private static ArgDecl argGZip = new ArgDecl(ArgDecl.HasValue, "gzip");
// Set the servlet context path (the initial path for URLs.) for any datasets.
// A context of "/path" and a dataset name of "/ds", service "sparql" is accessed as "/path/ds/sparql"
private static ArgDecl argContextPath = new ArgDecl(ArgDecl.HasValue, "pathBase", "contextPath", "pathbase", "contextpath");
// Static files. URLs are affected by argPathBase
private static ArgDecl argBase = new ArgDecl(ArgDecl.HasValue, "base", "files");
// -- Optional server features.
private static ArgDecl argCORS = new ArgDecl(ArgDecl.HasValue, "withCORS", "cors", "CORS", "cors-config");
private static ArgDecl argNoCORS = new ArgDecl(ArgDecl.NoValue, "noCORS", "no-cors");
private static ArgDecl argWithPing = new ArgDecl(ArgDecl.NoValue, "withPing", "ping");
private static ArgDecl argWithStats = new ArgDecl(ArgDecl.NoValue, "withStats", "stats");
private static ArgDecl argWithMetrics = new ArgDecl(ArgDecl.NoValue, "withMetrics", "metrics");
private static ArgDecl argWithCompact = new ArgDecl(ArgDecl.NoValue, "withCompact", "compact");
// Use modules found by the ServiceLoader. Currently, no-op.
private static ArgDecl argEnableModules = new ArgDecl(ArgDecl.HasValue, "modules", "fuseki-modules");
// -- Security / HTTPS arguments.
private static ArgDecl argAuth = new ArgDecl(ArgDecl.HasValue, "auth");
private static ArgDecl argHttps = new ArgDecl(ArgDecl.HasValue, "https");
private static ArgDecl argHttpsPort = new ArgDecl(ArgDecl.HasValue, "httpsPort", "httpsport", "sport");
private static ArgDecl argPasswdFile = new ArgDecl(ArgDecl.HasValue, "passwd");
private static ArgDecl argRealm = new ArgDecl(ArgDecl.HasValue, "realm");
// Same as --empty --validators --general=/sparql, --files=ARG
private static ArgDecl argSparqler = new ArgDecl(ArgDecl.HasValue, "sparqler");
private static ArgDecl argValidators = new ArgDecl(ArgDecl.NoValue, "validators");
// Command line module for dataset-by-assembler description.
private ModDatasetAssembler modDataset = new ModDatasetAssembler();
// Snapshot of registered argument customisers, taken in the constructor.
private List<FusekiServerArgsCustomiser> customiseServerArgs;
// Accumulates the outcome of command line processing.
private final ServerArgs serverArgs = new ServerArgs();
// Default
// Which TDB generation to use for command-line-created persistent datasets.
private boolean useTDB2 = true;
// -- Programmatic ways to create a server using command line syntax.
/**
 * Create a {@link org.apache.jena.fuseki.main.FusekiServer.Builder} which has
 * been setup according to the command line arguments.
 * The builder can be further modified.
 */
public static FusekiServer.Builder builder(String... args) {
    FusekiMain fusekiCmd = new FusekiMain(args);
    // Parse the command line and fill in fusekiCmd.serverArgs.
    fusekiCmd.process();
    // Transfer the collected settings onto a fresh builder.
    FusekiServer.Builder serverBuilder = FusekiServer.create();
    fusekiCmd.applyServerArgs(serverBuilder, fusekiCmd.serverArgs);
    return serverBuilder;
}
/**
 * Registers a custom arguments module.
 *
 * @deprecated Register a {@link org.apache.jena.fuseki.main.sys.FusekiServerArgsCustomiser} via
 * {@link #addCustomiser(FusekiServerArgsCustomiser)} instead.
 */
@Deprecated(forRemoval = true)
public static void addArgModule(ArgModuleGeneral argModule) {
    // Adapt the legacy ArgModuleGeneral to the customiser interface; the
    // parameter is effectively final so it can be captured directly.
    addCustomiser(new FusekiServerArgsCustomiser() {
        @Override
        public void serverArgsModify(CmdGeneral fusekiCmd, ServerArgs serverArgs) {
            fusekiCmd.addModule(argModule);
        }
        @Override
        public void serverArgsPrepare(CmdGeneral fusekiCmd, ServerArgs serverArgs) {
            argModule.processArgs(fusekiCmd);
        }
    });
}
/**
 * Registers a CLI customiser
 * <p>
 * A CLI customiser can add one/more custom arguments into the Fuseki Server CLI arguments and then can apply those
 * to the Fuseki server being built during the processing of {@link #processModulesAndArgs()}. This allows for
 * custom arguments that directly affect how the Fuseki server is built to be created.
 * </p>
 * @param customiser CLI customiser
 */
public static void addCustomiser(FusekiServerArgsCustomiser customiser) {
    ArgCustomizers.addCustomiser(Objects.requireNonNull(customiser));
}
/**
 * Registers CLI customisers.
 * <p>
 * CLI customisers can add one/more custom arguments into the Fuseki Server CLI arguments and then can apply those
 * to the Fuseki server being built during the processing of {@link #processModulesAndArgs()}. This allows for
 * custom arguments that directly affect how the Fuseki server is built to be created.
 * </p>
 * @see #addCustomiser(FusekiServerArgsCustomiser)
 */
public static void addCustomisers(FusekiModules customiserSet) {
    Objects.requireNonNull(customiserSet);
    customiserSet.forEach(ArgCustomizers::addCustomiser);
}
/**
 * Resets any previously registered CLI customisers.
 */
public static void resetCustomisers() {
    // Delegates to the shared registry of customisers.
    ArgCustomizers.resetCustomisers();
}
/** Run the given action over this instance's snapshot of customisers, in registration order. */
private void applyCustomisers(Consumer<FusekiServerArgsCustomiser> action) {
    customiseServerArgs.forEach(action);
}
// Customiser Lifecycle
//
// :: Setup args
// -- Called at the end of FusekiMain.argumentsSetup() after the standard arguments have been added.
// ---- FusekiServerArgsCustomiser.serverArgsModify(CmdLineGeneral, ServerArgs)
//
// :: Get values
// -- End of FusekiMain.processModulesAndArgs
// ---- FusekiServerArgsCustomiser.serverArgsPrepare(CmdGeneral fusekiCmd, ServerArgs serverArgs)
// :: Prepare server builder
// -- End of applyServerArgs
// ---- FusekiServerArgsCustomiser.serverArgsBuilder(FusekiServer.Builder serverBuilder, Model configModel)
// == Enter the build lifecycle in builder.build().
// Decide FusekiModules for the build.
// --> then into FusekiBuildCycle.prepare(FusekiServer.Builder serverBuilder, Set<String> datasetNames, Model configModel) { }
/**
 * Create the command object: snapshot the currently registered argument
 * customisers and declare all command line arguments.
 */
protected FusekiMain(String... argv) {
    super(argv);
    // Snapshot for this FusekiMain instance
    // (later changes to the registry do not affect a command already created).
    customiseServerArgs = List.copyOf(ArgCustomizers.get());
    argumentsSetup();
}
/**
 * Declare the command line arguments and their usage strings.
 * The order of the {@code add(...)} calls determines the order in the
 * printed usage message, so it is deliberate.
 */
private void argumentsSetup() {
    modVersion.addClass("Fuseki", Fuseki.class);
    getUsage().startCategory("Fuseki");
    add(argConfig, "--config=FILE",
        "Use a configuration file to determine the services");
    // ---- Describe the dataset on the command line.
    add(argMem, "--mem",
        "Create an in-memory, non-persistent dataset for the server");
    add(argFile, "--file=FILE",
        "Create an in-memory, non-persistent dataset for the server, initialised with the contents of the file");
    add(argTDB2mode, "--tdb2",
        "Use TDB2 for command line persistent datasets");
    add(argTDB1mode, "--tdb1",
        "Use TDB1 for command line persistent datasets (default is TDB2)");
    add(argTDB, "--loc=DIR",
        "Use an existing TDB database (or create if does not exist)");
    add(argMemTDB, "--memTDB",
        "Create an in-memory, non-persistent dataset using TDB (testing only)");
    add(argRDFS, "--rdfs=FILE",
        "Apply RDFS on top of the dataset");
    add(argUpdate, "--update",
        "Allow updates (via SPARQL Update and SPARQL HTTP Update)");
    addModule(modDataset);
    // ---- Server setup
    add(argEmpty); // Hidden
    add(argPort, "--port",
        "Listen on this port number");
    add(argLocalhost, "--localhost",
        "Listen only on the localhost interface");
    add(argGZip, "--gzip=on|off",
        "Enable GZip compression (HTTP Accept-Encoding) if request header set");
    add(argBase, "--base=DIR",
        "Directory for static content");
    add(argContextPath, "--contextPath=PATH",
        "Context path for the server");
    add(argHttps, "--https=CONF",
        "https certificate access details. JSON file { \"cert\":FILE , \"passwd\"; SECRET } ");
    add(argHttpsPort, "--httpsPort=NUM",
        "https port (default port is 3043)");
    add(argPasswdFile, "--passwd=FILE",
        "Password file");
    add(argTimeout, "--timeout=",
        "Global timeout applied to queries (value in ms) -- format is X[,Y] ");
    // ---- Servlets
    add(argSparqler, "--sparqler=DIR",
        "Run with SPARQLer services Directory for static content");
    add(argValidators, "--validators",
        "Install validators");
    add(argGeneralQuerySvc, "--general=PATH",
        "Add a general SPARQL endpoint (without a dataset) at /PATH");
    add(argAuth, "--auth=[basic|digest]",
        "Run the server using basic or digest authentication");
    add(argJettyConfig, "--jetty=FILE",
        "jetty.xml server configuration");
    add(argCORS, "--cors=FILE", "Configure CORS settings from file");
    add(argNoCORS, "--no-cors", "Disable CORS");
    add(argWithPing, "--ping", "Enable /$/ping");
    add(argWithStats, "--stats", "Enable /$/stats");
    add(argWithMetrics, "--metrics", "Enable /$/metrics");
    add(argWithCompact, "--compact", "Enable /$/compact/*");
    add(argEnableModules, "--modules=true|false", "Enable Fuseki autoloaded modules");
    // Let customisers declare their own arguments last.
    applyCustomisers(customiser -> customiser.serverArgsModify(this, serverArgs));
}
// One-line usage string shown in the command summary.
static String argUsage = "[--config=FILE|--mem|--loc=DIR|--file=FILE] [--port PORT] /DatasetPathName";
@Override
protected String getSummary() {
    return getCommandName() + " " + argUsage;
}
@Override
protected void processModulesAndArgs() {
    Logger log = Fuseki.serverLog;
    serverArgs.verboseLogging = super.isVerbose();
    // When bypassStdArgs is set, the standard command line handling is skipped.
    if ( ! serverArgs.bypassStdArgs )
        processStdArguments(log);
    // Have customisers process their own arguments.
    applyCustomisers(customiser->customiser.serverArgsPrepare(this, serverArgs));
}
/**
 * Process the standard command line arguments into {@code serverArgs}:
 * determine the dataset setup, validate argument combinations, and record
 * server-wide settings. Throws {@code CmdException} on invalid combinations.
 * The checks are order-dependent; do not reorder casually.
 */
private void processStdArguments(Logger log) {
    // ---- Command line definition of setup
    // One dataset
    // or a config file
    // or a "standard setup" e.g.SPARQLer
    // or empty allowed
    int numDefinitions = 0;
    SetupType setup = UNSET;
    if ( contains(argMem) ) {
        setup = MEM;
        numDefinitions++;
    }
    if ( contains(argFile) ) {
        setup = FILE;
        numDefinitions++;
    }
    if ( contains(assemblerDescDecl) ) {
        setup = ASSEM;
        numDefinitions++;
    }
    if ( contains(argTDB) ) {
        setup = TDB;
        numDefinitions++;
    }
    if ( contains(argMemTDB) ) {
        setup = MEMTDB;
        numDefinitions++;
    }
    if ( contains(argConfig) ) {
        setup = CONF;
        numDefinitions++;
    }
    if ( contains(argEmpty) ) {
        setup = NONE;
        //numDefinitions++;
    }
    if ( contains(argSparqler) ) {
        setup = SPARQLer;
        //numDefinitions++;
    }
    // ---- Validation
    if ( setup == UNSET && serverArgs.allowEmpty )
        setup = NONE;
    // Starting empty.
    boolean startEmpty = ( setup == NONE || setup == SPARQLer );
    if ( numDefinitions > 1 )
        throw new CmdException("Multiple ways providing a dataset. Only one of --mem, --file, --loc or --conf");
    if ( startEmpty && numDefinitions > 0 )
        throw new CmdException("Dataset provided but 'no dataset' flag given");
    if ( startEmpty && ! getPositional().isEmpty() )
        throw new CmdException("Dataset name provided but 'no dataset' flag given");
    if ( ! startEmpty && numDefinitions == 0 )
        throw new CmdException("No dataset or configuration specified on the command line");
    // Configuration file OR command line dataset
    if ( contains(argConfig) ) {
        // Invalid combination: --conf + arguments related to command line setup.
        if ( ! getPositional().isEmpty() )
            throw new CmdException("Can't have both a configuration file and a service name");
        if ( contains(argRDFS) )
            throw new CmdException("Need to define RDFS setup in the configuration file");
    } else {
        // No --conf
        if ( getPositional().size() > 1 )
            throw new CmdException("Multiple dataset path names given");
        if ( ! startEmpty && getPositional().size() == 0 ) {
            if ( setup == UNSET )
                throw new CmdException("Missing dataset description and service name");
            else
                throw new CmdException("Missing service name");
        }
        // Finally!
        if ( getPositional().size() == 1 )
            serverArgs.datasetPath = DataAccessPoint.canonical(getPositionalArg(0));
    }
    // ---- check: Invalid: --update + --conf
    if ( contains(argUpdate) && contains(argConfig) )
        throw new CmdException("--update and a configuration file does not make sense (control using the configuration file only)");
    boolean allowUpdate = contains(argUpdate);
    serverArgs.allowUpdate = allowUpdate;
    // -- Record the choice.
    serverArgs.setup = setup;
    serverArgs.datasetDescription = "<unset>";
    // ---- Dataset
    // A server has one of the command line dataset setups or a configuration file,
    // or "--empty" or "--sparqler"
    // Only one of these is chosen from the checking above.
    // Which TDB to use to create a command line TDB database.
    if ( contains(argTDB1mode) )
        useTDB2 = false;
    if ( contains(argTDB2mode) )
        useTDB2 = true;
    // For most setups, record a deferred dsgMaker rather than building the dataset now.
    switch(setup) {
        case CONF->{
            serverArgs.serverConfigFile = getValue(argConfig);
        }
        case MEM->{
            serverArgs.dsgMaker = args->DSGSetup.setupMem(log, args);
        }
        case FILE->{
            List<String> filenames = getValues(argFile);
            serverArgs.dsgMaker = args->DSGSetup.setupFile(log, filenames, args);
        }
        case TDB->{
            String directory = getValue(argTDB);
            serverArgs.dsgMaker = args->DSGSetup.setupTDB(log, directory, useTDB2, args);
        }
        case NONE->{
            serverArgs.startEmpty = true;
            serverArgs.datasetDescription = "No dataset";
        }
        case ASSEM->{
            serverArgs.dsgMaker = args->DSGSetup.setupAssembler(log, modDataset, args);
        }
        case MEMTDB->{
            DSGSetup.setupMemTDB(log, useTDB2, serverArgs);
        }
        case UNSET->{
            throw new CmdException("Internal error");
        }
        case SPARQLer -> {
            // --sparqler implies: static files + general SPARQL endpoint + validators, no dataset.
            String filebase = getValue(argSparqler);
            if ( !FileOps.exists(filebase) )
                throw new CmdException("File area not found: " + filebase);
            serverArgs.contentDirectory = filebase;
            serverArgs.addGeneral = "/sparql";
            serverArgs.startEmpty = true;
            serverArgs.validators = true;
        }
        default -> throw new IllegalArgumentException("Unexpected value: " + setup);
    }
    // ---- RDFS
    if ( contains(argRDFS) ) {
        String rdfsVocab = getValue(argRDFS);
        if ( !FileOps.exists(rdfsVocab) )
            throw new CmdException("No such file for RDFS: "+rdfsVocab);
        serverArgs.rdfsSchemaGraph = RDFDataMgr.loadGraph(rdfsVocab);
    }
    // ---- Misc features.
    if ( contains(argTimeout) ) {
        String str = getValue(argTimeout);
        // Global (context-wide) query timeout, not per-server.
        ARQ.getContext().set(ARQ.queryTimeout, str);
    }
    if ( contains(argGeneralQuerySvc) ) {
        String z = getValue(argGeneralQuerySvc);
        if ( ! z.startsWith("/") )
            z = "/"+z;
        serverArgs.addGeneral = z;
    }
    if ( contains(argValidators) ) {
        serverArgs.validators = true;
    }
    // -- Server setup.
    // A Jetty XML configuration takes over port/host/auth settings, so those
    // arguments are rejected when --jetty is given.
    boolean hasJettyConfigFile = contains(argJettyConfig);
    // ---- Port
    serverArgs.port = defaultPort;
    if ( contains(argPort) ) {
        if ( hasJettyConfigFile )
            throw new CmdException("Cannot specify the port and also provide a Jetty configuration file");
        serverArgs.port = portNumber(argPort);
    }
    if ( contains(argLocalhost) ) {
        if ( hasJettyConfigFile )
            throw new CmdException("Cannot specify 'localhost' and also provide a Jetty configuration file");
        serverArgs.loopback = true;
    }
    if ( contains(argContextPath) ) {
        String contextPath = getValue(argContextPath);
        contextPath = sanitizeContextPath(contextPath);
        if ( contextPath != null )
            serverArgs.servletContextPath = contextPath;
    }
    if ( contains(argBase) ) {
        // Static files.
        String filebase = getValue(argBase);
        if ( ! FileOps.exists(filebase) ) {
            throw new CmdException("File area not found: "+filebase);
            //FmtLog.warn(Fuseki.configLog, "File area not found: "+filebase);
        }
        serverArgs.contentDirectory = filebase;
    }
    if ( contains(argPasswdFile) ) {
        if ( hasJettyConfigFile )
            throw new CmdException("Can't specify a password file and also provide a Jetty configuration file");
        serverArgs.passwdFile = getValue(argPasswdFile);
    }
    if ( contains(argRealm) )
        serverArgs.realm = getValue(argRealm);
    if ( contains(argHttpsPort) && ! contains(argHttps) )
        throw new CmdException("https port given but not certificate details via --"+argHttps.getKeyName());
    if ( contains(argHttps) ) {
        if ( hasJettyConfigFile )
            throw new CmdException("Can't specify \"https\" and also provide a Jetty configuration file");
        serverArgs.httpsPort = defaultHttpsPort;
        if ( contains(argHttpsPort) )
            serverArgs.httpsPort = portNumber(argHttpsPort);
        String httpsSetup = getValue(argHttps);
        // The details go in a separate file that can be secured.
        serverArgs.httpsKeysDetails = httpsSetup;
    }
    if ( contains(argAuth) ) {
        if ( hasJettyConfigFile )
            throw new CmdException("Can't specify authentication and also provide a Jetty configuration file");
        String schemeStr = getValue(argAuth);
        serverArgs.authScheme = AuthScheme.scheme(schemeStr);
    }
    // Jetty server : this will be the server configuration regardless of other settings.
    if ( contains(argJettyConfig) ) {
        String jettyConfigFile = getValue(argJettyConfig);
        if ( ! FileOps.exists(jettyConfigFile) )
            throw new CmdException("Jetty config file not found: "+jettyConfigFile);
        serverArgs.jettyConfigFile = jettyConfigFile;
    }
    boolean withModules = hasValueOfTrue(argEnableModules);
    if ( withModules ) {
        FusekiModules presetModules = serverArgs.fusekiModules;
        // Get auto modules from system-wide setup.
        FusekiModules autoModules = FusekiModules.getSystemModules();
        // Merge preset and auto-loaded modules into one FusekiModules instance.
        if ( presetModules == null ) {
            serverArgs.fusekiModules = autoModules;
        } else {
            List<FusekiModule> allModules = Stream.concat(
                presetModules.asList().stream(),
                autoModules.asList().stream())
                .distinct()
                .toList();
            serverArgs.fusekiModules = FusekiModules.create(allModules);
        }
    }
    if ( contains(argCORS) ) {
        String corsConfigFile = getValue(argCORS);
        if ( ! FileOps.exists(corsConfigFile) )
            throw new CmdException("CORS config file not found: "+corsConfigFile);
        serverArgs.corsConfigFile = corsConfigFile;
    } else if (contains(argNoCORS)) {
        // contains(argNoCORS) is true in this branch, so this sets withCORS = false.
        serverArgs.withCORS = ! contains(argNoCORS);
    }
    serverArgs.withPing = contains(argWithPing);
    serverArgs.withStats = contains(argWithStats);
    serverArgs.withMetrics = contains(argWithMetrics);
    serverArgs.withCompact = contains(argWithCompact);
}
/**
 * Parse the value of a port-carrying argument.
 *
 * @param arg the argument whose value is the port number
 * @return the port number, or -1 if the argument value is empty
 * @throws CmdException if the value is not a valid integer
 */
private int portNumber(ArgDecl arg) {
    String portStr = getValue(arg);
    if ( portStr.isEmpty() )
        return -1;
    try {
        return Integer.parseInt(portStr);
    } catch (NumberFormatException ex) {
        // Fix: report the argument actually being parsed. The message used
        // argPort.getKeyName() unconditionally, so a bad --httpsPort value
        // was reported as a bad "port" argument.
        throw new CmdException(arg.getKeyName() + " : bad port number: '" + portStr + "'");
    }
}
/**
 * Normalize a servlet context path: {@code null} for "" or "/" (meaning "no
 * explicit context path"), ensure a leading slash, and reject a trailing slash.
 */
private static String sanitizeContextPath(String contextPath) {
    if ( contextPath.isEmpty() || contextPath.equals("/") )
        return null;
    if ( contextPath.endsWith("/") )
        throw new CmdException("Path base must not end with \"/\": '"+contextPath+"'");
    return contextPath.startsWith("/") ? contextPath : "/" + contextPath;
}
@Override
protected void exec() {
FusekiCoreInfo.logCode(Fuseki.serverLog);
FusekiServer server = execMakeServer();
execStartServer(server);
}
private FusekiServer execMakeServer() {
try {
return makeServer(serverArgs);
} catch (AssemblerException | FusekiException ex) {
if ( ex.getCause() != null )
System.err.println(ex.getCause().getMessage());
else
System.err.println(ex.getMessage());
throw new TerminationException(1);
}
}
/** The method is blocking. */
private void execStartServer(FusekiServer server) {
infoCmd(server, Fuseki.serverLog);
try {
server.start();
} catch (FusekiException ex) {
if ( ex.getCause() instanceof BindException ) {
if ( serverArgs.jettyConfigFile == null )
Fuseki.serverLog.error("Failed to start server: "+ex.getCause().getMessage()+ ": port="+serverArgs.port);
else
Fuseki.serverLog.error("Failed to start server: "+ex.getCause().getMessage()+ ": port in use");
System.exit(1);
}
throw ex;
} catch (Exception ex) {
throw new FusekiException("Failed to start server: " + ex.getMessage(), ex);
}
// This does not normally return.
server.join();
System.exit(0);
}
    /**
     * Take a {@link ServerArgs} and make a {@link FusekiServer}.
     * The returned server has been configured but not started.
     */
    private FusekiServer makeServer(ServerArgs serverArgs) {
        FusekiServer.Builder builder = FusekiServer.create();
        applyServerArgs(builder, serverArgs);
        return builder.build();
    }
    /**
     * Apply {@link ServerArgs} to a {@link FusekiServer.Builder}.
     * Configures networking, servlets, dataset services and security from the
     * parsed command-line/configuration settings. Does not build or start the server.
     */
    private void applyServerArgs(FusekiServer.Builder builder, ServerArgs serverArgs) {
        // NOTE(review): computed but not used below - TODO confirm whether it can be removed.
        boolean commandLineSetup = ( serverArgs.dataset != null || serverArgs.dsgMaker != null );
        // An external Jetty configuration file, if supplied, takes over the Jetty setup.
        if ( serverArgs.jettyConfigFile != null )
            builder.jettyServerConfig(serverArgs.jettyConfigFile);
        builder.port(serverArgs.port);
        builder.loopback(serverArgs.loopback);
        builder.verbose(serverArgs.verboseLogging);
        if ( serverArgs.addGeneral != null )
            // Add SPARQL_QueryGeneral as a general servlet, not reached by the service router.
            builder.addServlet(serverArgs.addGeneral, new SPARQL_QueryGeneral());
        if ( serverArgs.validators ) {
            // Validators: fixed admin-style endpoints for checking queries, updates, IRIs, etc.
            builder.addServlet("/$/validate/query", new QueryValidator());
            builder.addServlet("/$/validate/update", new UpdateValidator());
            builder.addServlet("/$/validate/iri", new IRIValidator());
            builder.addServlet("/$/validate/langtag", new LangTagValidator())
;
            builder.addServlet("/$/validate/data", new DataValidator());
        }
        // Apply arguments for the database services, in precedence order:
        //   if not empty:
        //     If there is a config model - use that (ignore command line dataset)
        //     If there is a config file - load and use that (ignore command line dataset)
        //     Otherwise - command line dataset setup.
        if ( ! serverArgs.startEmpty ) {
            if (serverArgs.serverConfigModel != null ) {
                // -- Customiser has already set the configuration model
                builder.parseConfig(serverArgs.serverConfigModel);
                serverArgs.datasetDescription = "Configuration: provided";
            } else if ( serverArgs.serverConfigFile != null ) {
                // -- Configuration file.
                String file = serverArgs.serverConfigFile;
                // Strip a "file:" scheme prefix so the name can be used as a filesystem path.
                if ( file.startsWith("file:") )
                    file = file.substring("file:".length());
                Path path = Path.of(file);
                IOX.checkReadableFile(file, msg->new CmdException(msg));
                serverArgs.datasetDescription = "Configuration: "+path.toAbsolutePath();
                serverArgs.serverConfigModel = RDFParser.source(path).toModel();
                // Add dataset and model declarations.
                AssemblerUtils.prepareForAssembler(serverArgs.serverConfigModel);
                // ... and perform server configuration
                builder.parseConfig(serverArgs.serverConfigModel);
            } else {
                // No serverConfigFile, no serverConfigModel.
                // -- A dataset setup by command line arguments.
                if ( serverArgs.datasetPath == null )
                    throw new CmdException("No URL path name for the dataset");
                // The dataset setup by command line arguments.
                // A customizer may have set the dataset.
                if ( serverArgs.dataset == null ) {
                    // The dsgMaker should set serverArgs.dataset and serverArgs.datasetDescription
                    serverArgs.dsgMaker.accept(serverArgs);
                }
                // This should have been set somehow by this point.
                if ( serverArgs.dataset == null )
                    // Internal error: should have happened during checking earlier.
                    throw new CmdException("Failed to set the dataset service");
                // RDFS -- Command line - add RDFS inference over the dataset.
                if ( serverArgs.rdfsSchemaGraph != null ) {
                    DSGSetup.setupRDFS(Fuseki.serverLog, serverArgs.rdfsSchemaGraph, serverArgs);
                }
                builder.add(serverArgs.datasetPath, serverArgs.dataset, serverArgs.allowUpdate);
            }
        }
        // Optional server features - each applied only when explicitly configured.
        if ( serverArgs.fusekiModules != null )
            builder.fusekiModules(serverArgs.fusekiModules);
        if ( serverArgs.servletContextPath != null )
            builder.contextPath(serverArgs.servletContextPath);
        if ( serverArgs.contentDirectory != null )
            builder.staticFileBase(serverArgs.contentDirectory);
        if ( serverArgs.passwdFile != null )
            builder.passwordFile(serverArgs.passwdFile);
        if ( serverArgs.realm != null )
            builder.realm(serverArgs.realm);
        if ( serverArgs.httpsKeysDetails != null)
            builder.https(serverArgs.httpsPort, serverArgs.httpsKeysDetails);
        if ( serverArgs.authScheme != null )
            builder.auth(serverArgs.authScheme);
        if ( serverArgs.withCORS )
            builder.enableCors(true, serverArgs.corsConfigFile);
        if ( serverArgs.withPing )
            builder.enablePing(true);
        if ( serverArgs.withStats )
            builder.enableStats(true);
        if ( serverArgs.withMetrics )
            builder.enableMetrics(true);
        if ( serverArgs.withCompact )
            builder.enableCompact(true);
        // Allow customisers to inspect and modify the builder.
        applyCustomisers(customiser->customiser.serverArgsBuilder(builder, serverArgs.serverConfigModel));
    }
/** Information from the command line setup */
private void infoCmd(FusekiServer server, Logger log) {
if ( super.isQuiet() )
return;
DataAccessPointRegistry dapRegistry = DataAccessPointRegistry.get(server.getServletContext());
if ( serverArgs.datasetPath != null ) {
if ( dapRegistry.size() != 1 )
log.error("Expected only one dataset in the DataAccessPointRegistry");
}
// Log details on startup.
String datasetPath = serverArgs.datasetPath;
String datasetDescription = serverArgs.datasetDescription;
String serverConfigFile = serverArgs.serverConfigFile;
String staticFiles = serverArgs.contentDirectory;
boolean verbose = serverArgs.verboseLogging;
if ( ! super.isQuiet() )
FusekiCoreInfo.logServerCmdSetup(log, verbose, dapRegistry,
datasetPath, datasetDescription, serverConfigFile, staticFiles);
}
    /** Command name used in usage/help output. */
    @Override
    protected String getCommandName() {
        return "fuseki";
    }
}
|
googleapis/google-cloud-java | 35,205 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListConnectionsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Request message for ListConnections.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListConnectionsRequest}
*/
public final class ListConnectionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListConnectionsRequest)
ListConnectionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListConnectionsRequest.newBuilder() to construct.
private ListConnectionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListConnectionsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListConnectionsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListConnectionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListConnectionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListConnectionsRequest.class,
com.google.privacy.dlp.v2.ListConnectionsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Number of results per page, max 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.privacy.dlp.v2.ListConnectionsRequest)) {
return super.equals(obj);
}
com.google.privacy.dlp.v2.ListConnectionsRequest other =
(com.google.privacy.dlp.v2.ListConnectionsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.privacy.dlp.v2.ListConnectionsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ListConnections.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListConnectionsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListConnectionsRequest)
com.google.privacy.dlp.v2.ListConnectionsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListConnectionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListConnectionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListConnectionsRequest.class,
com.google.privacy.dlp.v2.ListConnectionsRequest.Builder.class);
}
// Construct using com.google.privacy.dlp.v2.ListConnectionsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListConnectionsRequest_descriptor;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListConnectionsRequest getDefaultInstanceForType() {
return com.google.privacy.dlp.v2.ListConnectionsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListConnectionsRequest build() {
com.google.privacy.dlp.v2.ListConnectionsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListConnectionsRequest buildPartial() {
com.google.privacy.dlp.v2.ListConnectionsRequest result =
new com.google.privacy.dlp.v2.ListConnectionsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.privacy.dlp.v2.ListConnectionsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.privacy.dlp.v2.ListConnectionsRequest) {
return mergeFrom((com.google.privacy.dlp.v2.ListConnectionsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.privacy.dlp.v2.ListConnectionsRequest other) {
if (other == com.google.privacy.dlp.v2.ListConnectionsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Resource name of the organization or project, for
* example, `organizations/433245324/locations/europe` or
* `projects/project-id/locations/asia`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Number of results per page, max 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Number of results per page, max 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Number of results per page, max 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Page token from a previous page to return the next set of
* results. If set, all other request fields must match the original request.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
// Backing storage for `filter`; holds either a String or a cached UTF-8 ByteString.
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
// Field holds a ByteString: decode it once and cache the String in its place.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
// Field still holds a String: encode it once and cache the ByteString in its place.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
// Record in bitField0_ that this field was explicitly set so buildPartial copies it.
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
// Restore the default value and drop the "was set" bit.
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Supported field/value: `state` - MISSING|AVAILABLE|ERROR
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject byte sequences that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
// Plain delegation to the GeneratedMessageV3.Builder implementation.
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListConnectionsRequest)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListConnectionsRequest)
// Singleton default instance; also the prototype from which builders start.
private static final com.google.privacy.dlp.v2.ListConnectionsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListConnectionsRequest();
}
public static com.google.privacy.dlp.v2.ListConnectionsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that backs all parseFrom(...) entry points for this message type.
private static final com.google.protobuf.Parser<ListConnectionsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListConnectionsRequest>() {
@java.lang.Override
public ListConnectionsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach what was parsed so far so callers can inspect the partial message.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListConnectionsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListConnectionsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListConnectionsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/functions/v2beta/functions.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.functions.v2beta;
/**
*
*
* <pre>
* Location of the source in an archive file in Google Cloud Storage.
* </pre>
*
* Protobuf type {@code google.cloud.functions.v2beta.StorageSource}
*/
public final class StorageSource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.functions.v2beta.StorageSource)
StorageSourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use StorageSource.newBuilder() to construct.
private StorageSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used for the default instance; string fields start empty.
private StorageSource() {
bucket_ = "";
object_ = "";
sourceUploadUrl_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
// Hook for the protobuf runtime to create fresh instances; the parameter is a marker only.
return new StorageSource();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
// Descriptor lives in the generated FunctionsProto holder class.
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_StorageSource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Binds the shared accessor table to this message class and its Builder.
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_StorageSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.functions.v2beta.StorageSource.class,
com.google.cloud.functions.v2beta.StorageSource.Builder.class);
}
public static final int BUCKET_FIELD_NUMBER = 1;
// Holds either a String or a cached UTF-8 ByteString; volatile publishes the cached value safely.
@SuppressWarnings("serial")
private volatile java.lang.Object bucket_ = "";
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @return The bucket.
*/
@java.lang.Override
public java.lang.String getBucket() {
java.lang.Object ref = bucket_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the stored ByteString once and keep the String for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
bucket_ = s;
return s;
}
}
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @return The bytes for bucket.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBucketBytes() {
java.lang.Object ref = bucket_;
if (ref instanceof java.lang.String) {
// Encode the stored String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
bucket_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int OBJECT_FIELD_NUMBER = 2;
// Holds either a String or a cached UTF-8 ByteString; volatile publishes the cached value safely.
@SuppressWarnings("serial")
private volatile java.lang.Object object_ = "";
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @return The object.
*/
@java.lang.Override
public java.lang.String getObject() {
java.lang.Object ref = object_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the stored ByteString once and keep the String for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
object_ = s;
return s;
}
}
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @return The bytes for object.
*/
@java.lang.Override
public com.google.protobuf.ByteString getObjectBytes() {
java.lang.Object ref = object_;
if (ref instanceof java.lang.String) {
// Encode the stored String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
object_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int GENERATION_FIELD_NUMBER = 3;
// Proto3 scalar: 0 is the default and is skipped on the wire (see writeTo).
private long generation_ = 0L;
/**
*
*
* <pre>
* Google Cloud Storage generation for the object. If the generation is
* omitted, the latest generation will be used.
* </pre>
*
* <code>int64 generation = 3;</code>
*
* @return The generation.
*/
@java.lang.Override
public long getGeneration() {
// Plain scalar accessor; no lazy caching involved.
return generation_;
}
public static final int SOURCE_UPLOAD_URL_FIELD_NUMBER = 4;
// Holds either a String or a cached UTF-8 ByteString; volatile publishes the cached value safely.
@SuppressWarnings("serial")
private volatile java.lang.Object sourceUploadUrl_ = "";
/**
*
*
* <pre>
* When the specified storage bucket is a 1st gen function uploard url bucket,
* this field should be set as the generated upload url for 1st gen
* deployment.
* </pre>
*
* <code>string source_upload_url = 4;</code>
*
* @return The sourceUploadUrl.
*/
@java.lang.Override
public java.lang.String getSourceUploadUrl() {
java.lang.Object ref = sourceUploadUrl_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the stored ByteString once and keep the String for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sourceUploadUrl_ = s;
return s;
}
}
/**
*
*
* <pre>
* When the specified storage bucket is a 1st gen function uploard url bucket,
* this field should be set as the generated upload url for 1st gen
* deployment.
* </pre>
*
* <code>string source_upload_url = 4;</code>
*
* @return The bytes for sourceUploadUrl.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSourceUploadUrlBytes() {
java.lang.Object ref = sourceUploadUrl_;
if (ref instanceof java.lang.String) {
// Encode the stored String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
sourceUploadUrl_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memo: -1 = not yet computed, 1 = initialized, 0 = known not initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message has nothing that can be uninitialized, so the result is always true.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 semantics: fields equal to their default value are not written to the wire.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bucket_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bucket_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(object_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, object_);
}
if (generation_ != 0L) {
output.writeInt64(3, generation_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceUploadUrl_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, sourceUploadUrl_);
}
// Unknown fields (preserved from parsing) are re-emitted last.
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Result is cached in memoizedSize after the first computation (-1 = not computed).
int size = memoizedSize;
if (size != -1) return size;
size = 0;
// Mirrors writeTo: only non-default fields contribute to the size.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bucket_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bucket_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(object_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, object_);
}
if (generation_ != 0L) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, generation_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceUploadUrl_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, sourceUploadUrl_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.functions.v2beta.StorageSource)) {
return super.equals(obj);
}
com.google.cloud.functions.v2beta.StorageSource other =
(com.google.cloud.functions.v2beta.StorageSource) obj;
// Field-by-field comparison; unknown fields must match as well.
if (!getBucket().equals(other.getBucket())) return false;
if (!getObject().equals(other.getObject())) return false;
if (getGeneration() != other.getGeneration()) return false;
if (!getSourceUploadUrl().equals(other.getSourceUploadUrl())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Cached after first computation; 0 doubles as the "not computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
// Seed with the descriptor so distinct message types hash differently.
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + BUCKET_FIELD_NUMBER;
hash = (53 * hash) + getBucket().hashCode();
hash = (37 * hash) + OBJECT_FIELD_NUMBER;
hash = (53 * hash) + getObject().hashCode();
hash = (37 * hash) + GENERATION_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getGeneration());
hash = (37 * hash) + SOURCE_UPLOAD_URL_FIELD_NUMBER;
hash = (53 * hash) + getSourceUploadUrl().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to PARSER or the GeneratedMessageV3 helpers.
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
// Stream variants wrap parser errors into IOException-based flows.
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.StorageSource parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v2beta.StorageSource parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v2beta.StorageSource parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
// Builders are derived from the default instance rather than constructed directly.
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.functions.v2beta.StorageSource prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields a fresh Builder; other instances seed it via mergeFrom.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Location of the source in an archive file in Google Cloud Storage.
* </pre>
*
* Protobuf type {@code google.cloud.functions.v2beta.StorageSource}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.functions.v2beta.StorageSource)
com.google.cloud.functions.v2beta.StorageSourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
// Same descriptor as the outer message class.
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_StorageSource_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Same accessor table as the outer message class.
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_StorageSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.functions.v2beta.StorageSource.class,
com.google.cloud.functions.v2beta.StorageSource.Builder.class);
}
// Construct using com.google.cloud.functions.v2beta.StorageSource.newBuilder()
private Builder() {}
// Parent-aware constructor used when this builder is nested inside another builder.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset the presence bits and every field to its proto3 default.
bitField0_ = 0;
bucket_ = "";
object_ = "";
generation_ = 0L;
sourceUploadUrl_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_StorageSource_descriptor;
}
@java.lang.Override
public com.google.cloud.functions.v2beta.StorageSource getDefaultInstanceForType() {
// Mirrors the outer class's singleton default instance.
return com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.functions.v2beta.StorageSource build() {
// Unlike buildPartial(), build() verifies initialization and throws if incomplete.
com.google.cloud.functions.v2beta.StorageSource result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.functions.v2beta.StorageSource buildPartial() {
com.google.cloud.functions.v2beta.StorageSource result =
new com.google.cloud.functions.v2beta.StorageSource(this);
// Only copy fields whose presence bits were set on this builder.
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.functions.v2beta.StorageSource result) {
int from_bitField0_ = bitField0_;
// Each bit corresponds to one field, in declaration order.
if (((from_bitField0_ & 0x00000001) != 0)) {
result.bucket_ = bucket_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.object_ = object_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.generation_ = generation_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.sourceUploadUrl_ = sourceUploadUrl_;
}
}
// The overrides below are plain delegations to the GeneratedMessageV3.Builder implementations.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Use the typed fast path when possible; otherwise fall back to the reflective merge.
if (other instanceof com.google.cloud.functions.v2beta.StorageSource) {
return mergeFrom((com.google.cloud.functions.v2beta.StorageSource) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.functions.v2beta.StorageSource other) {
// Merging the default instance is a no-op.
if (other == com.google.cloud.functions.v2beta.StorageSource.getDefaultInstance())
return this;
// Non-default (non-empty / non-zero) fields in `other` overwrite this builder's values.
if (!other.getBucket().isEmpty()) {
bucket_ = other.bucket_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getObject().isEmpty()) {
object_ = other.object_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getGeneration() != 0L) {
setGeneration(other.getGeneration());
}
if (!other.getSourceUploadUrl().isEmpty()) {
sourceUploadUrl_ = other.sourceUploadUrl_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// This message type has nothing that can be uninitialized.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
// Tag = (field number << 3) | wire type; 0 signals end of input.
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
bucket_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
object_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
generation_ = input.readInt64();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
sourceUploadUrl_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
// Unknown fields are preserved rather than dropped.
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify listeners even on error so partial state is observable.
onChanged();
} // finally
return this;
}
// Presence bits for this builder's fields (one bit per field, declaration order).
private int bitField0_;
private java.lang.Object bucket_ = "";
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @return The bucket.
*/
public java.lang.String getBucket() {
java.lang.Object ref = bucket_;
if (!(ref instanceof java.lang.String)) {
// Decode the stored ByteString once and keep the String for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
bucket_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @return The bytes for bucket.
*/
public com.google.protobuf.ByteString getBucketBytes() {
java.lang.Object ref = bucket_;
if (ref instanceof String) {
// Encode the stored String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
bucket_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @param value The bucket to set.
* @return This builder for chaining.
*/
public Builder setBucket(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bucket_ = value;
// Record that this field was explicitly set so buildPartial copies it.
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearBucket() {
// Restore the default value and drop the "was set" bit.
bucket_ = getDefaultInstance().getBucket();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Google Cloud Storage bucket containing the source (see
* [Bucket Name
* Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
* </pre>
*
* <code>string bucket = 1;</code>
*
* @param value The bytes for bucket to set.
* @return This builder for chaining.
*/
public Builder setBucketBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject byte sequences that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
bucket_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Holds either a String or a cached UTF-8 ByteString.
private java.lang.Object object_ = "";
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @return The object.
*/
public java.lang.String getObject() {
java.lang.Object ref = object_;
if (!(ref instanceof java.lang.String)) {
// Decode the stored ByteString once and keep the String for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
object_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @return The bytes for object.
*/
public com.google.protobuf.ByteString getObjectBytes() {
java.lang.Object ref = object_;
if (ref instanceof String) {
// Encode the stored String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
object_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @param value The object to set.
* @return This builder for chaining.
*/
public Builder setObject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
object_ = value;
// Record that this field was explicitly set so buildPartial copies it.
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearObject() {
// Restore the default value and drop the "was set" bit.
object_ = getDefaultInstance().getObject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Google Cloud Storage object containing the source.
*
* This object must be a gzipped archive file (`.tar.gz`) containing source to
* build.
* </pre>
*
* <code>string object = 2;</code>
*
* @param value The bytes for object to set.
* @return This builder for chaining.
*/
public Builder setObjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Reject byte sequences that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
object_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Proto3 scalar; defaults to 0 until explicitly set.
private long generation_;
/**
*
*
* <pre>
* Google Cloud Storage generation for the object. If the generation is
* omitted, the latest generation will be used.
* </pre>
*
* <code>int64 generation = 3;</code>
*
* @return The generation.
*/
@java.lang.Override
public long getGeneration() {
// Plain scalar accessor; no lazy caching involved.
return generation_;
}
/**
*
*
* <pre>
* Google Cloud Storage generation for the object. If the generation is
* omitted, the latest generation will be used.
* </pre>
*
* <code>int64 generation = 3;</code>
*
* @param value The generation to set.
* @return This builder for chaining.
*/
public Builder setGeneration(long value) {
generation_ = value;
// Record that this field was explicitly set so buildPartial copies it.
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Google Cloud Storage generation for the object. If the generation is
* omitted, the latest generation will be used.
* </pre>
*
* <code>int64 generation = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearGeneration() {
// Drop the "was set" bit and restore the proto3 default of 0.
bitField0_ = (bitField0_ & ~0x00000004);
generation_ = 0L;
onChanged();
return this;
}
// Holds either a String or a cached UTF-8 ByteString.
private java.lang.Object sourceUploadUrl_ = "";
/**
*
*
* <pre>
* When the specified storage bucket is a 1st gen function uploard url bucket,
* this field should be set as the generated upload url for 1st gen
* deployment.
* </pre>
*
* <code>string source_upload_url = 4;</code>
*
* @return The sourceUploadUrl.
*/
public java.lang.String getSourceUploadUrl() {
java.lang.Object ref = sourceUploadUrl_;
if (!(ref instanceof java.lang.String)) {
// Decode the stored ByteString once and keep the String for later calls.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sourceUploadUrl_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* When the specified storage bucket is a 1st gen function uploard url bucket,
* this field should be set as the generated upload url for 1st gen
* deployment.
* </pre>
*
* <code>string source_upload_url = 4;</code>
*
* @return The bytes for sourceUploadUrl.
*/
public com.google.protobuf.ByteString getSourceUploadUrlBytes() {
java.lang.Object ref = sourceUploadUrl_;
if (ref instanceof String) {
// Encode the stored String once and keep the ByteString for later calls.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
sourceUploadUrl_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* When the specified storage bucket is a 1st gen function uploard url bucket,
* this field should be set as the generated upload url for 1st gen
* deployment.
* </pre>
*
* <code>string source_upload_url = 4;</code>
*
* @param value The sourceUploadUrl to set.
* @return This builder for chaining.
*/
public Builder setSourceUploadUrl(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
sourceUploadUrl_ = value;
// Record that this field was explicitly set so buildPartial copies it.
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* When the specified storage bucket is a 1st gen function uploard url bucket,
* this field should be set as the generated upload url for 1st gen
* deployment.
* </pre>
*
* <code>string source_upload_url = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearSourceUploadUrl() {
// Restore the default value and drop the "was set" bit.
sourceUploadUrl_ = getDefaultInstance().getSourceUploadUrl();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * When the specified storage bucket is a 1st gen function upload url bucket,
 * this field should be set as the generated upload url for 1st gen
 * deployment.
 * </pre>
 *
 * <code>string source_upload_url = 4;</code>
 *
 * @param value The bytes for sourceUploadUrl to set.
 * @return This builder for chaining.
 */
public Builder setSourceUploadUrlBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject byte sequences that are not valid UTF-8 before storing them.
  checkByteStringIsUtf8(value);
  sourceUploadUrl_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
// Delegations kept only so the generated Builder exposes the standard
// unknown-field handling of the protobuf runtime superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.functions.v2beta.StorageSource)
}
// @@protoc_insertion_point(class_scope:google.cloud.functions.v2beta.StorageSource)
// Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.functions.v2beta.StorageSource DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.functions.v2beta.StorageSource();
}

public static com.google.cloud.functions.v2beta.StorageSource getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. Any parse failure still attaches the partially-built
// message so callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<StorageSource> PARSER =
    new com.google.protobuf.AbstractParser<StorageSource>() {
      @java.lang.Override
      public StorageSource parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // I/O problems are surfaced uniformly as InvalidProtocolBufferException.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Standard generated accessors exposing the shared PARSER and default instance.
public static com.google.protobuf.Parser<StorageSource> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<StorageSource> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.functions.v2beta.StorageSource getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/myfaces | 35,515 | impl/src/main/java/org/apache/myfaces/view/facelets/tag/composite/CompositeComponentResourceTagHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.view.facelets.tag.composite;
import java.beans.BeanDescriptor;
import java.beans.BeanInfo;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import jakarta.el.ELException;
import jakarta.el.ValueExpression;
import jakarta.el.VariableMapper;
import jakarta.faces.FacesException;
import jakarta.faces.application.ProjectStage;
import jakarta.faces.application.Resource;
import jakarta.faces.component.ActionSource;
import jakarta.faces.component.EditableValueHolder;
import jakarta.faces.component.UIComponent;
import jakarta.faces.component.UIPanel;
import jakarta.faces.component.UniqueIdVendor;
import jakarta.faces.component.ValueHolder;
import jakarta.faces.context.FacesContext;
import jakarta.faces.event.PhaseId;
import jakarta.faces.view.AttachedObjectHandler;
import jakarta.faces.view.ViewDeclarationLanguage;
import jakarta.faces.view.facelets.ComponentConfig;
import jakarta.faces.view.facelets.ComponentHandler;
import jakarta.faces.view.facelets.FaceletContext;
import jakarta.faces.view.facelets.FaceletException;
import jakarta.faces.view.facelets.FaceletHandler;
import jakarta.faces.view.facelets.MetaRuleset;
import jakarta.faces.view.facelets.Metadata;
import jakarta.faces.view.facelets.TagException;
import jakarta.faces.view.facelets.TextHandler;
import org.apache.myfaces.application.NavigationHandlerImpl;
import org.apache.myfaces.view.facelets.AbstractFaceletContext;
import org.apache.myfaces.view.facelets.FaceletCompositionContext;
import org.apache.myfaces.view.facelets.TemplateClient;
import org.apache.myfaces.view.facelets.TemplateContext;
import org.apache.myfaces.view.facelets.el.VariableMapperWrapper;
import org.apache.myfaces.view.facelets.tag.ComponentContainerHandler;
import org.apache.myfaces.view.facelets.tag.TagHandlerUtils;
import org.apache.myfaces.view.facelets.tag.faces.ActionSourceRule;
import org.apache.myfaces.view.facelets.tag.faces.ClearBindingValueExpressionListener;
import org.apache.myfaces.view.facelets.tag.faces.ComponentBuilderHandler;
import org.apache.myfaces.view.facelets.tag.faces.ComponentSupport;
import org.apache.myfaces.view.facelets.tag.faces.EditableValueHolderRule;
import org.apache.myfaces.view.facelets.tag.faces.PreDisposeViewEvent;
import org.apache.myfaces.view.facelets.tag.faces.ValueHolderRule;
import org.apache.myfaces.view.facelets.tag.faces.core.AjaxHandler;
/**
* This handler is responsible for apply composite components. It
* is created by CompositeResourceLibrary class when a composite component
* is found.
*
* @author Leonardo Uribe (latest modification by $Author$)
* @version $Revision$ $Date$
*/
public class CompositeComponentResourceTagHandler extends ComponentHandler
implements ComponentBuilderHandler, TemplateClient
{
// Component-attribute key tracking the creation step of a programmatically
// (dynamically) added composite component: absent -> not started, 0 -> instance
// created, 1 -> PostAddToViewEvent processing, 2 -> fully applied.
public static final String CREATE_CC_ON_POST_ADD_TO_VIEW = "oamf.cc.CREATE_CC_POST_ADD_TO_VIEW";

// Resource that locates the composite component's defining facelet page.
private final Resource _resource;

// Cached attribute-metadata mapper; rebuilt when the component type changes.
private Metadata _mapper;

// Component type the cached _mapper was built for.
private Class<?> _lastType = Object.class;

// Lazily-built map of facet name -> handler; volatile because apply() may be
// reached from different request threads reusing this handler instance.
protected volatile Map<String, FaceletHandler> _facetHandlersMap;

// Child handlers that produce components/text (relocated by cc:insertChildren).
protected final ArrayList<FaceletHandler> _componentHandlers;

// Child handlers that produce facets (relocated by cc:insertFacet).
protected final ArrayList<FaceletHandler> _facetHandlers;

// True while this handler processes a dynamically-created composite component.
private boolean _dynamicCompositeComponent;
/**
 * Creates a handler for the composite component identified by the given resource.
 * Splits the nested handlers up-front into facet handlers and component/text
 * handlers so they can later be relocated by cc:insertFacet / cc:insertChildren.
 *
 * @param config the tag configuration for this component
 * @param resource the resource pointing to the composite component definition page
 */
public CompositeComponentResourceTagHandler(ComponentConfig config, Resource resource)
{
    super(config);
    _resource = resource;
    _facetHandlers = TagHandlerUtils.findNextByType(nextHandler, jakarta.faces.view.facelets.FacetHandler.class,
                                                    InsertFacetHandler.class);
    _componentHandlers = TagHandlerUtils.findNextByType(nextHandler,
            jakarta.faces.view.facelets.ComponentHandler.class,
            ComponentContainerHandler.class, TextHandler.class);
    _dynamicCompositeComponent = false;
}
/**
 * Creates the composite component instance for this tag, wires the optional
 * {@code binding} value expression, and (outside Production stage) validates that
 * every attribute declared {@code required} in the component metadata was supplied.
 *
 * @param ctx the current facelet context
 * @return the created composite component
 * @throws TagException if a required attribute is missing (non-production only)
 */
@Override
public UIComponent createComponent(FaceletContext ctx)
{
    FacesContext facesContext = ctx.getFacesContext();
    UIComponent component = facesContext.getApplication().createComponent(facesContext, _resource);
    if (getBinding() != null)
    {
        ValueExpression bindingVE = getBinding().getValueExpression(ctx, Object.class);
        component.setValueExpression("binding", bindingVE);
        if (!bindingVE.isReadOnly(facesContext.getELContext()))
        {
            // During view restore, push the freshly-created component back into the binding.
            if (PhaseId.RESTORE_VIEW.equals(facesContext.getCurrentPhaseId()))
            {
                bindingVE.setValue(ctx, component);
            }
            // Ensure the binding is cleared when the view is disposed, to avoid leaks.
            ComponentSupport.getViewRoot(ctx, component)
                    .getAttributes().put(NavigationHandlerImpl.CALL_PRE_DISPOSE_VIEW, Boolean.TRUE);
            component.subscribeToEvent(PreDisposeViewEvent.class, new ClearBindingValueExpressionListener());
        }
    }

    // Check required attributes if the app is not on production stage.
    // Unfortunately, we can't check it on constructor because we need to call
    // ViewDeclarationLanguage.getComponentMetadata() and on that point it is possible to not
    // have a viewId.
    if (!facesContext.isProjectStage(ProjectStage.Production))
    {
        BeanInfo beanInfo = (BeanInfo) component.getAttributes().get(UIComponent.BEANINFO_KEY);
        for (PropertyDescriptor propertyDescriptor : beanInfo.getPropertyDescriptors())
        {
            ValueExpression ve = (ValueExpression) propertyDescriptor.getValue("required");
            if (ve != null)
            {
                Object value = ve.getValue(facesContext.getELContext());
                boolean required;
                if (value instanceof Boolean boolean1)
                {
                    required = boolean1;
                }
                else
                {
                    // FIX: previously value.toString() was called unconditionally and threw a
                    // NullPointerException when the "required" expression evaluated to null.
                    // A null result now means "not required".
                    required = value != null && Boolean.parseBoolean(value.toString());
                }

                if (required)
                {
                    Object attrValue = this.tag.getAttributes().get(propertyDescriptor.getName());
                    if (attrValue == null)
                    {
                        throw new TagException(this.tag, "Attribute '" + propertyDescriptor.getName()
                                                         + "' is required");
                    }
                }
            }
        }
    }
    return component;
}
/**
 * Applies the nested handlers and the composite component facelet page to the
 * component. Handles both the usual facelet-defined case and the Faces 2.2+
 * programmatic (dynamic) case, where the CREATE_CC_ON_POST_ADD_TO_VIEW attribute
 * tracks the creation step (see the constant's documentation).
 *
 * @param ctx the current facelet context
 * @param c the composite component instance
 * @throws IOException if applying a facelet fails
 */
@SuppressWarnings("unchecked")
@Override
public void applyNextHandler(FaceletContext ctx, UIComponent c) throws IOException
{
    FaceletCompositionContext mctx = FaceletCompositionContext.getCurrentInstance(ctx);

    // Since Faces 2.2, there are two cases here:
    //
    // 1. The composite component content is defined as facelet content like usual.
    // 2. The composite component content will be defined programmatically. That means,
    //    once the component instance is created, the user will be responsible to add
    //    children / facets and the code that process the composite component take effect
    //    when the composite component is added to the view.
    if (mctx.isDynamicCompositeComponentHandler())
    {
        _dynamicCompositeComponent = true;
        try
        {
            // Temporarily clear the flag so nested handlers run in "normal" mode.
            mctx.setDynamicCompositeComponentHandler(false);

            // If the composite component needs to be created dynamically
            //
            Integer step = (Integer) c.getAttributes().get(CREATE_CC_ON_POST_ADD_TO_VIEW);
            if (step == null)
            {
                // The flag is not found, so we are creating the component right now.
                // Add the flag and return.
                c.getAttributes().put(CREATE_CC_ON_POST_ADD_TO_VIEW, 0);
            }
            else if (step == 0)
            {
                // Should not happen, stop processing
            }
            else if (step == 1)
            {
                // The component was created, and the listener attached to PostAddToViewEvent
                // is executing right now. Do the necessary steps to process the
                // composite component dynamically.
                applyNextHandlerIfNotAppliedDynamically(ctx, c);
                applyCompositeComponentFacelet(ctx,c);
                applyFinalInitializationSteps(ctx, mctx, c);
                c.getAttributes().put(CREATE_CC_ON_POST_ADD_TO_VIEW, 2);
            }
            else
            {
                // Refresh over dynamic composite component
                applyCompositeComponentFacelet(ctx,c);
            }
        }
        finally
        {
            // Restore the dynamic-handler flag for outer handlers.
            mctx.setDynamicCompositeComponentHandler(true);
        }
    }
    else
    {
        applyNextHandlerIfNotApplied(ctx, c);
        applyCompositeComponentFacelet(ctx,c);
        if (ComponentHandler.isNew(c))
        {
            applyFinalInitializationSteps(ctx, mctx, c);
        }
    }
}
/**
 * Runs the one-time initialization steps after the composite component tree is
 * built: retargets attached objects (validators, listeners, ajax behaviors) and
 * method expressions through the VDL, and marks the initial state when partial
 * state saving is active.
 *
 * @param ctx the current facelet context
 * @param mctx the facelet composition context holding pending attached-object handlers
 * @param c the composite component instance
 */
protected void applyFinalInitializationSteps(FaceletContext ctx, FaceletCompositionContext mctx, UIComponent c)
{
    FacesContext facesContext = ctx.getFacesContext();
    ViewDeclarationLanguage vdl = facesContext.getApplication().getViewHandler().
        getViewDeclarationLanguage(facesContext, facesContext.getViewRoot().getViewId());

    List<AttachedObjectHandler> handlers = mctx.getAttachedObjectHandlers(c);
    if (handlers != null)
    {
        vdl.retargetAttachedObjects(facesContext, c, handlers);

        // remove the list of handlers, as it is no longer necessary
        mctx.removeAttachedObjectHandlers(c);
    }
    vdl.retargetMethodExpressions(facesContext, c);

    if ( FaceletCompositionContext.getCurrentInstance(ctx).isMarkInitialState())
    {
        // Call it only if we are using partial state saving
        c.markInitialState();
        // Call it to other components created not bound by a tag handler
        c.getFacet(UIComponent.COMPOSITE_FACET_NAME).markInitialState();
    }
}
/**
 * Applies every nested handler that was not already relocated via cc:insertFacet /
 * cc:insertChildren, then validates that all facets declared as required in the
 * component metadata were actually provided.
 *
 * @param ctx the current facelet context
 * @param c the composite component instance
 * @throws IOException if applying a nested handler fails
 * @throws TagException if a required facet is missing
 */
@SuppressWarnings("unchecked")
protected void applyNextHandlerIfNotApplied(FaceletContext ctx, UIComponent c)
    throws IOException
{
    //Apply all facelets not applied yet.

    CompositeComponentBeanInfo beanInfo =
        (CompositeComponentBeanInfo) c.getAttributes().get(UIComponent.BEANINFO_KEY);
    BeanDescriptor beanDescriptor = beanInfo.getBeanDescriptor();

    boolean insertChildrenUsed = (beanDescriptor.getValue(InsertChildrenHandler.INSERT_CHILDREN_USED) != null);

    List<String> insertFacetList = (List<String>) beanDescriptor.getValue(InsertFacetHandler.INSERT_FACET_USED);

    if (nextHandler instanceof jakarta.faces.view.facelets.CompositeFaceletHandler faceletHandler)
    {
        for (FaceletHandler handler :
                faceletHandler.getHandlers())
        {
            if (handler instanceof jakarta.faces.view.facelets.FacetHandler facetHandler)
            {
                // Skip facets already relocated by cc:insertFacet.
                if (insertFacetList == null ||
                    !insertFacetList.contains(
                        facetHandler.getFacetName(ctx)))
                {
                    handler.apply(ctx, c);
                }
            }
            else if (handler instanceof InsertFacetHandler facetHandler)
            {
                if (insertFacetList == null ||
                    !insertFacetList.contains(facetHandler.getFacetName(ctx)))
                {
                    handler.apply(ctx, c);
                }
            }
            else if (insertChildrenUsed)
            {
                // Children were relocated by cc:insertChildren; only apply non-component handlers.
                if (!(handler instanceof jakarta.faces.view.facelets.ComponentHandler ||
                      handler instanceof ComponentContainerHandler ||
                      handler instanceof TextHandler))
                {
                    handler.apply(ctx, c);
                }
            }
            else
            {
                handler.apply(ctx, c);
            }
        }
    }
    else
    {
        // Single nested handler: same decision tree as above.
        if (nextHandler instanceof jakarta.faces.view.facelets.FacetHandler handler)
        {
            if (insertFacetList == null ||
                !insertFacetList.contains(
                    handler.getFacetName(ctx)))
            {
                nextHandler.apply(ctx, c);
            }
        }
        else if (nextHandler instanceof InsertFacetHandler handler)
        {
            if (insertFacetList == null ||
                !insertFacetList.contains(handler.getFacetName(ctx)) )
            {
                nextHandler.apply(ctx, c);
            }
        }
        else if (insertChildrenUsed)
        {
            if (!(nextHandler instanceof jakarta.faces.view.facelets.ComponentHandler ||
                  nextHandler instanceof ComponentContainerHandler ||
                  nextHandler instanceof TextHandler))
            {
                nextHandler.apply(ctx, c);
            }
        }
        else
        {
            nextHandler.apply(ctx, c);
        }
    }

    //Check for required facets
    Map<String, PropertyDescriptor> facetPropertyDescriptorMap = (Map<String, PropertyDescriptor>)
        beanDescriptor.getValue(UIComponent.FACETS_KEY);

    if (facetPropertyDescriptorMap != null)
    {
        List<String> facetsRequiredNotFound = null;
        for (Map.Entry<String, PropertyDescriptor> entry : facetPropertyDescriptorMap.entrySet())
        {
            ValueExpression requiredExpr = (ValueExpression) entry.getValue().getValue("required");
            if (requiredExpr != null)
            {
                Boolean required = requiredExpr.getValue(ctx.getFacesContext().getELContext());
                if (Boolean.TRUE.equals(required))
                {
                    initFacetHandlersMap(ctx);
                    if (!_facetHandlersMap.containsKey(entry.getKey()))
                    {
                        if (facetsRequiredNotFound == null)
                        {
                            // FIX: use the diamond operator instead of a raw ArrayList,
                            // matching the declared List<String> type.
                            facetsRequiredNotFound = new ArrayList<>(facetPropertyDescriptorMap.size());
                        }
                        facetsRequiredNotFound.add(entry.getKey());
                    }
                }
            }
        }
        if (facetsRequiredNotFound != null && !facetsRequiredNotFound.isEmpty())
        {
            throw new TagException(getTag(), "The following facets are required by the component: "
                                             + facetsRequiredNotFound);
        }
    }
}
/**
 * Applies the composite component's defining facelet page into the implicit
 * COMPOSITE_FACET_NAME panel facet, creating that panel (with a stable id) if it
 * does not yet exist. Also forwards any pending ajax behaviors so the later
 * retargetAttachedObjects call can attach them.
 *
 * @param faceletContext the current facelet context
 * @param compositeComponentBase the composite component instance
 * @throws IOException if applying the facelet page fails
 * @throws TagException if no default resource exists for the component
 */
protected void applyCompositeComponentFacelet(FaceletContext faceletContext, UIComponent compositeComponentBase)
    throws IOException
{
    FaceletCompositionContext mctx = FaceletCompositionContext.getCurrentInstance(faceletContext);
    AbstractFaceletContext actx = (AbstractFaceletContext) faceletContext;
    UIPanel compositeFacetPanel =
        (UIPanel) compositeComponentBase.getFacets().get(UIComponent.COMPOSITE_FACET_NAME);
    if (compositeFacetPanel == null)
    {
        compositeFacetPanel = (UIPanel)
            faceletContext.getFacesContext().getApplication().createComponent(
                faceletContext.getFacesContext(), UIPanel.COMPONENT_TYPE, null);
        compositeFacetPanel.getAttributes().put(ComponentSupport.COMPONENT_ADDED_BY_HANDLER_MARKER,
                                                Boolean.TRUE);
        compositeComponentBase.getFacets().put(UIComponent.COMPOSITE_FACET_NAME, compositeFacetPanel);

        // Set an id to the created facet component, to prevent id generation and make
        // partial state saving work without problem.
        UniqueIdVendor uniqueIdVendor = mctx.getUniqueIdVendorFromStack();
        if (uniqueIdVendor == null)
        {
            uniqueIdVendor = ComponentSupport.getViewRoot(faceletContext, compositeComponentBase);
        }
        if (uniqueIdVendor != null)
        {
            // UIViewRoot implements UniqueIdVendor, so there is no need to cast to UIViewRoot
            // and call createUniqueId()
            String uid = uniqueIdVendor.createUniqueId(faceletContext.getFacesContext(),
                    mctx.getSharedStringBuilder()
                        .append(compositeComponentBase.getId())
                        .append("__f_")
                        .append("cc_facet").toString());
            compositeFacetPanel.setId(uid);
        }
    }

    // Before call applyCompositeComponent we need to add ajax behaviors
    // to the current compositeComponentBase. Note that super.applyNextHandler()
    // has already been called, but this point is before vdl.retargetAttachedObjects,
    // so we can't but this on ComponentTagHandlerDelegate, if we want this to be
    // applied correctly.
    Iterator<AjaxHandler> it = ((AbstractFaceletContext) faceletContext).getAjaxHandlers();
    if (it != null)
    {
        while (it.hasNext())
        {
            mctx.addAttachedObjectHandler(compositeComponentBase, it.next());
        }
    }

    VariableMapper orig = faceletContext.getVariableMapper();
    try
    {
        // Isolate EL variables of the composite page and register this handler as the
        // template client so cc:insertChildren/insertFacet can call back into apply().
        faceletContext.setVariableMapper(new VariableMapperWrapper(orig));

        actx.pushCompositeComponentClient(this);

        Resource resourceForCurrentView = faceletContext.getFacesContext().getApplication().
            getResourceHandler().createResource(_resource.getResourceName(), _resource.getLibraryName());
        if (resourceForCurrentView != null)
        {
            //Wrap it for serialization.
            resourceForCurrentView = new CompositeResourceWrapper(resourceForCurrentView);
        }
        else
        {
            //If a resource cannot be resolved it means a default for the current
            //composite component does not exists.
            throw new TagException(getTag(), "Composite Component " + getTag().getQName()
                                             + " requires a default instance that can be found by the installed ResourceHandler.");
        }
        actx.applyCompositeComponent(compositeFacetPanel, resourceForCurrentView);
    }
    finally
    {
        actx.popCompositeComponentClient();
        faceletContext.setVariableMapper(orig);
    }
}
/**
 * Applies the tag attributes to the component, lazily (re)building the metadata
 * mapper whenever the concrete component type differs from the last one seen.
 *
 * @param ctx the current facelet context
 * @param instance the component instance receiving the attributes (may be null)
 */
@Override
public void setAttributes(FaceletContext ctx, Object instance)
{
    if (instance == null)
    {
        return;
    }
    UIComponent component = (UIComponent) instance;
    Class<?> currentType = instance.getClass();
    // The mapper is cached per component type; rebuild it only when stale.
    boolean mapperStale = (_mapper == null) || !_lastType.equals(currentType);
    if (mapperStale)
    {
        _lastType = currentType;
        BeanInfo beanInfo = (BeanInfo) component.getAttributes().get(UIComponent.BEANINFO_KEY);
        _mapper = createMetaRuleset(currentType, beanInfo).finish();
    }
    _mapper.applyMetadata(ctx, instance);
}
/**
 * Builds the meta ruleset used to wire tag attributes onto the composite
 * component: ignores the standard attributes handled elsewhere, adds the
 * composite-specific rules, and layers on holder rules by capability.
 *
 * @param type the concrete component class
 * @param beanInfo the composite component metadata
 * @return the configured ruleset
 */
protected MetaRuleset createMetaRuleset(Class<?> type, BeanInfo beanInfo)
{
    MetaRuleset ruleset = new CompositeMetaRulesetImpl(this.getTag(), type, beanInfo);

    // "binding" and "id" are standard attributes processed by the tag handler itself.
    ruleset.ignore("binding");
    ruleset.ignore("id");

    // Auto-wiring for declared composite attributes.
    ruleset.addRule(CompositeComponentRule.INSTANCE);
    // Retargeting for method expressions (action, listener, ...).
    ruleset.addRule(RetargetMethodExpressionRule.INSTANCE);

    if (ActionSource.class.isAssignableFrom(type))
    {
        ruleset.addRule(ActionSourceRule.INSTANCE);
    }

    if (ValueHolder.class.isAssignableFrom(type))
    {
        ruleset.addRule(ValueHolderRule.INSTANCE);

        if (EditableValueHolder.class.isAssignableFrom(type))
        {
            // Transient state attributes must never be set from the tag.
            ruleset.ignore("submittedValue");
            ruleset.ignore("valid");
            ruleset.addRule(EditableValueHolderRule.INSTANCE);
        }
    }

    return ruleset;
}
/**
 * Lazily builds the facet-name -> handler lookup map from the facet handlers
 * discovered in the constructor. Safe to call repeatedly; only the first call
 * per handler instance does any work.
 *
 * @param ctx the current facelet context (needed to resolve facet names)
 */
private void initFacetHandlersMap(FaceletContext ctx)
{
    if (_facetHandlersMap != null)
    {
        return;
    }
    Map<String, FaceletHandler> byName = new HashMap<>(_facetHandlers.size());
    for (FaceletHandler candidate : _facetHandlers)
    {
        // Both handler flavors expose a facet name, but through unrelated types.
        if (candidate instanceof jakarta.faces.view.facelets.FacetHandler standardFacet)
        {
            byName.put(standardFacet.getFacetName(ctx), candidate);
        }
        else if (candidate instanceof InsertFacetHandler insertFacet)
        {
            byName.put(insertFacet.getFacetName(ctx), candidate);
        }
    }
    _facetHandlersMap = byName;
}
/**
 * TemplateClient callback invoked from cc:insertChildren (name == null) or
 * cc:insertFacet (name != null) inside the composite page. Relocates the page
 * author's children/facets into the requested spot. Handles three situations:
 * the dynamic (programmatic) creation step, the dynamic refresh step, and the
 * normal facelet-defined case.
 *
 * @param ctx the current facelet context
 * @param parent the component under which the insertion happens
 * @param name the facet name to insert, or null for children
 * @return always true (the insertion point is always considered handled)
 */
@Override
public boolean apply(FaceletContext ctx, UIComponent parent, String name)
    throws IOException, FacesException, FaceletException, ELException
{
    if (_dynamicCompositeComponent)
    {
        AbstractFaceletContext actx = (AbstractFaceletContext) ctx;
        FaceletCompositionContext fcc = actx.getFaceletCompositionContext();
        UIComponent innerCompositeComponent = fcc.getCompositeComponentFromStack();

        // In a programatical addition, the code that process the composite component only takes effect
        // when the composite component is added to the view.
        Integer step = (Integer) innerCompositeComponent.getAttributes().get(CREATE_CC_ON_POST_ADD_TO_VIEW);
        if (step != null && step == 1)
        {
            if (name != null)
            {
                //1. Initialize map used to retrieve facets
                if (innerCompositeComponent.getFacetCount() == 0)
                {
                    checkFacetRequired(ctx, name);
                    return true;
                }
                UIComponent facet = innerCompositeComponent.getFacet(name);
                if (facet != null)
                {
                    // Insert facet: move it from the composite component to the insertion parent.
                    innerCompositeComponent.getFacets().remove(name);
                    parent.getFacets().put(name, facet);
                    return true;
                }
                else
                {
                    checkFacetRequired(ctx, name);
                    return true;
                }
            }
            else
            {
                if (innerCompositeComponent.getChildCount() > 0)
                {
                    String facetName = (String) parent.getAttributes().get(
                        org.apache.myfaces.view.facelets.tag.faces.core.FacetHandler.KEY);
                    // Insert children: detach them all first, then reattach in order so
                    // PostAddToViewEvent fires with the final parent.
                    List<UIComponent> children = new ArrayList<>(innerCompositeComponent.getChildCount());
                    while (innerCompositeComponent.getChildCount() > 0)
                    {
                        children.add(innerCompositeComponent.getChildren().remove(0));
                    }
                    while (!children.isEmpty())
                    {
                        UIComponent child = children.remove(0);
                        // Marker allows the refresh path below to find relocated children later.
                        child.getAttributes().put(InsertChildrenHandler.INSERT_CHILDREN_USED, Boolean.TRUE);
                        if (facetName != null)
                        {
                            ComponentSupport.addFacet(ctx, parent, child, facetName);
                        }
                        else
                        {
                            parent.getChildren().add(child);
                        }
                    }
                }
                return true;
            }
        }
        else if (step != null && step > 1)
        {
            // refresh case, in facet case it is not necessary to remove/add the facet, because there
            // is no relative order (it is always on the same spot).
            if (name == null)
            {
                String facetName = (String) parent.getAttributes().get(
                    org.apache.myfaces.view.facelets.tag.faces.core.FacetHandler.KEY);
                // refresh case, remember the inserted children does not have any
                // associated tag handler, so in this case we just need to remove and add them in the same order
                // we found them
                List<UIComponent> children = null;
                if (facetName == null)
                {
                    children = new ArrayList<>(parent.getChildCount());
                    int i = 0;
                    while (parent.getChildCount()-i > 0)
                    {
                        UIComponent child = parent.getChildren().get(i);
                        if (Boolean.TRUE.equals(
                                child.getAttributes().get(InsertChildrenHandler.INSERT_CHILDREN_USED)))
                        {
                            children.add(parent.getChildren().remove(i));
                        }
                        else
                        {
                            i++;
                        }
                    }
                }
                else
                {
                    children = new ArrayList<>();
                    UIComponent child = parent.getFacet(facetName);
                    if (Boolean.TRUE.equals(child.getAttributes().get(InsertChildrenHandler.INSERT_CHILDREN_USED)))
                    {
                        parent.getFacets().remove(facetName);
                        children.add(child);
                    }
                    else
                    {
                        // The facet is a panel wrapping several children; scan inside it.
                        UIComponent parentToApply = child;
                        int i = 0;
                        while (parentToApply.getChildCount()-i > 0)
                        {
                            child = parentToApply.getChildren().get(i);
                            if (Boolean.TRUE.equals(child.getAttributes().get(
                                    InsertChildrenHandler.INSERT_CHILDREN_USED)))
                            {
                                children.add(parentToApply.getChildren().remove(i));
                            }
                            else
                            {
                                i++;
                            }
                        }
                    }
                }
                while (!children.isEmpty())
                {
                    UIComponent child = children.remove(0);
                    if (facetName != null)
                    {
                        ComponentSupport.addFacet(ctx, parent, child, facetName);
                    }
                    else
                    {
                        parent.getChildren().add(child);
                    }
                }
            }
        }
        return true;
    }
    if (name != null)
    {
        //1. Initialize map used to retrieve facets
        if (_facetHandlers == null || _facetHandlers.isEmpty())
        {
            checkFacetRequired(ctx, name);
            return true;
        }
        initFacetHandlersMap(ctx);
        FaceletHandler handler = _facetHandlersMap.get(name);
        if (handler != null)
        {
            AbstractFaceletContext actx = (AbstractFaceletContext) ctx;
            // Pop the current composite component on stack, so #{cc} references
            // can be resolved correctly, because they are relative to the page
            // that define it.
            FaceletCompositionContext fcc = actx.getFaceletCompositionContext();
            UIComponent innerCompositeComponent = fcc.getCompositeComponentFromStack();
            fcc.popCompositeComponentToStack();
            // Pop the template context, so ui:xx tags and nested composite component
            // cases could work correctly
            TemplateContext itc = actx.popTemplateContext();
            try
            {
                handler.apply(ctx, parent);
            }
            finally
            {
                actx.pushTemplateContext(itc);
                fcc.pushCompositeComponentToStack(innerCompositeComponent);
            }
            return true;
        }
        else
        {
            checkFacetRequired(ctx, name);
            return true;
        }
    }
    else
    {
        AbstractFaceletContext actx = (AbstractFaceletContext) ctx;
        // Pop the current composite component on stack, so #{cc} references
        // can be resolved correctly, because they are relative to the page
        // that define it.
        FaceletCompositionContext fcc = actx.getFaceletCompositionContext();
        UIComponent innerCompositeComponent = fcc.getCompositeComponentFromStack();
        fcc.popCompositeComponentToStack();
        // Pop the template context, so ui:xx tags and nested composite component
        // cases could work correctly
        TemplateContext itc = actx.popTemplateContext();
        try
        {
            for (int i = 0; i < _componentHandlers.size(); i++)
            {
                _componentHandlers.get(i).apply(ctx, parent);
            }
        }
        finally
        {
            actx.pushTemplateContext(itc);
            fcc.pushCompositeComponentToStack(innerCompositeComponent);
        }
        return true;
    }
}
/**
 * Throws a TagException when the named facet is declared as required by the
 * composite component metadata (cc:insertFacet required="true") but was not
 * provided by the page author. Does nothing when the facet is optional or unknown.
 *
 * @param ctx the current facelet context
 * @param name the facet name to check
 * @throws TagException if the facet is required and missing
 */
// FIX: added the unchecked suppression the sibling methods already carry for the
// same untyped BeanDescriptor lookup, so the method compiles without warnings.
@SuppressWarnings("unchecked")
private void checkFacetRequired(FaceletContext ctx, String name)
{
    AbstractFaceletContext actx = (AbstractFaceletContext) ctx;
    FaceletCompositionContext fcc = actx.getFaceletCompositionContext();
    UIComponent innerCompositeComponent = fcc.getCompositeComponentFromStack();

    CompositeComponentBeanInfo beanInfo =
        (CompositeComponentBeanInfo) innerCompositeComponent.getAttributes()
            .get(UIComponent.BEANINFO_KEY);
    BeanDescriptor beanDescriptor = beanInfo.getBeanDescriptor();

    Map<String, PropertyDescriptor> insertFacetPropertyDescriptorMap = (Map<String, PropertyDescriptor>)
        beanDescriptor.getValue(InsertFacetHandler.INSERT_FACET_KEYS);

    if (insertFacetPropertyDescriptorMap != null && insertFacetPropertyDescriptorMap.containsKey(name))
    {
        ValueExpression requiredExpr
            = (ValueExpression) insertFacetPropertyDescriptorMap.get(name).getValue("required");

        if (requiredExpr != null &&
            Boolean.TRUE.equals(requiredExpr.getValue(ctx.getFacesContext().getELContext())))
        {
            //Insert facet associated is required, but it was not applied.
            throw new TagException(this.tag, "Cannot find facet with name '"+name+"' in composite component");
        }
    }
}
/**
 * Validation counterpart of {@link #applyNextHandlerIfNotApplied} for dynamically
 * (programmatically) created composite components: there are no nested facelet
 * handlers to apply, so only the required-facet check is performed against the
 * facets the caller attached directly.
 *
 * @param ctx the current facelet context
 * @param c the composite component instance
 * @throws IOException declared for signature parity with the facelet-based path
 * @throws TagException if a required facet is missing
 */
@SuppressWarnings("unchecked")
protected void applyNextHandlerIfNotAppliedDynamically(FaceletContext ctx, UIComponent c)
    throws IOException
{
    CompositeComponentBeanInfo beanInfo =
        (CompositeComponentBeanInfo) c.getAttributes().get(UIComponent.BEANINFO_KEY);
    BeanDescriptor beanDescriptor = beanInfo.getBeanDescriptor();

    // Since the children / facet were added programmatically, there is no handler or facelets to apply.

    //Check for required facets
    Map<String, PropertyDescriptor> facetPropertyDescriptorMap = (Map<String, PropertyDescriptor>)
        beanDescriptor.getValue(UIComponent.FACETS_KEY);

    if (facetPropertyDescriptorMap != null)
    {
        List<String> facetsRequiredNotFound = null;
        for (Map.Entry<String, PropertyDescriptor> entry : facetPropertyDescriptorMap.entrySet())
        {
            ValueExpression requiredExpr = (ValueExpression) entry.getValue().getValue("required");
            if (requiredExpr != null)
            {
                Boolean required = requiredExpr.getValue(ctx.getFacesContext().getELContext());
                if (Boolean.TRUE.equals(required))
                {
                    if (c.getFacet(entry.getKey()) == null)
                    {
                        if (facetsRequiredNotFound == null)
                        {
                            // FIX: use the diamond operator instead of a raw ArrayList,
                            // matching the declared List<String> type.
                            facetsRequiredNotFound = new ArrayList<>(facetPropertyDescriptorMap.size());
                        }
                        facetsRequiredNotFound.add(entry.getKey());
                    }
                }
            }
        }
        if (facetsRequiredNotFound != null && !facetsRequiredNotFound.isEmpty())
        {
            throw new TagException(getTag(), "The following facets are required by the component: "
                                             + facetsRequiredNotFound);
        }
    }
}
}
|
googleapis/google-cloud-java | 35,196 | java-data-fusion/proto-google-cloud-data-fusion-v1beta1/src/main/java/com/google/cloud/datafusion/v1beta1/CreateInstanceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datafusion/v1beta1/v1beta1.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datafusion.v1beta1;
/**
*
*
* <pre>
* Request message for creating a Data Fusion instance.
* </pre>
*
* Protobuf type {@code google.cloud.datafusion.v1beta1.CreateInstanceRequest}
*/
public final class CreateInstanceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datafusion.v1beta1.CreateInstanceRequest)
CreateInstanceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateInstanceRequest.newBuilder() to construct.
private CreateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance; string fields start at "".
private CreateInstanceRequest() {
  parent_ = "";
  instanceId_ = "";
}
// Runtime hook used by the protobuf library to create fresh instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateInstanceRequest();
}
// Message descriptor from the generated file-level descriptor holder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.datafusion.v1beta1.V1Beta1
      .internal_static_google_cloud_datafusion_v1beta1_CreateInstanceRequest_descriptor;
}
// Binds the descriptor fields to this class and its Builder for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.datafusion.v1beta1.V1Beta1
      .internal_static_google_cloud_datafusion_v1beta1_CreateInstanceRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.class,
          com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.Builder.class);
}
// Presence bits for message-typed fields (bit 0 tracks `instance`).
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;

// Stored as Object so the String/ByteString representations can be cached lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The instance's project and location in the format
 * projects/{project}/locations/{location}.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the cached ByteString and memoize the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The instance's project and location in the format
 * projects/{project}/locations/{location}.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and memoize the ByteString form for subsequent calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int INSTANCE_ID_FIELD_NUMBER = 2;

// Stored as Object so the String/ByteString representations can be cached lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object instanceId_ = "";
/**
 *
 *
 * <pre>
 * Required. The name of the instance to create.
 * </pre>
 *
 * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The instanceId.
 */
@java.lang.Override
public java.lang.String getInstanceId() {
  java.lang.Object ref = instanceId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the cached ByteString and memoize the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    instanceId_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. The name of the instance to create.
 * </pre>
 *
 * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for instanceId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getInstanceIdBytes() {
  java.lang.Object ref = instanceId_;
  if (ref instanceof java.lang.String) {
    // Encode once and memoize the ByteString form for subsequent calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    instanceId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
  public static final int INSTANCE_FIELD_NUMBER = 3;
  // Singular message field; explicit presence is tracked by bit 0 of bitField0_.
  private com.google.cloud.datafusion.v1beta1.Instance instance_;
  /**
   *
   *
   * <pre>
   * An instance resource.
   * </pre>
   *
   * <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
   *
   * @return Whether the instance field is set.
   */
  @java.lang.Override
  public boolean hasInstance() {
    // Bit 0 of bitField0_ records whether the field was explicitly set.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * An instance resource.
   * </pre>
   *
   * <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
   *
   * @return The instance.
   */
  @java.lang.Override
  public com.google.cloud.datafusion.v1beta1.Instance getInstance() {
    // Never returns null: falls back to the default instance when unset.
    return instance_ == null
        ? com.google.cloud.datafusion.v1beta1.Instance.getDefaultInstance()
        : instance_;
  }
  /**
   *
   *
   * <pre>
   * An instance resource.
   * </pre>
   *
   * <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
   */
  @java.lang.Override
  public com.google.cloud.datafusion.v1beta1.InstanceOrBuilder getInstanceOrBuilder() {
    // Same fallback as getInstance(); exposed through the read-only interface.
    return instance_ == null
        ? com.google.cloud.datafusion.v1beta1.Instance.getDefaultInstance()
        : instance_;
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this proto3 message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 string fields are serialized only when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_);
    }
    // The message field is serialized only when explicitly set (presence bit 0).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getInstance());
    }
    // Round-trip any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 marks "not yet computed". Mirrors the field order in writeTo().
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getInstance());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datafusion.v1beta1.CreateInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datafusion.v1beta1.CreateInstanceRequest other =
        (com.google.cloud.datafusion.v1beta1.CreateInstanceRequest) obj;
    // Value equality over all fields, including presence of the message field
    // and any unknown fields carried through parsing.
    if (!getParent().equals(other.getParent())) return false;
    if (!getInstanceId().equals(other.getInstanceId())) return false;
    if (hasInstance() != other.hasInstance()) return false;
    if (hasInstance()) {
      if (!getInstance().equals(other.getInstance())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; relies on a computed hash of 0 being vanishingly rare (a zero
    // hash is simply recomputed on each call).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated mixing scheme: primes 19/37/53 fold in the descriptor,
    // field numbers, and field values; 29 folds in the unknown fields.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getInstanceId().hashCode();
    if (hasInstance()) {
      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
      hash = (53 * hash) + getInstance().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points. The buffer/byte overloads delegate
  // directly to PARSER; the stream overloads go through the GeneratedMessageV3
  // helpers, which translate protocol errors into
  // InvalidProtocolBufferException and propagate raw I/O failures.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a fresh builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the given message's field values.
  public static Builder newBuilder(
      com.google.cloud.datafusion.v1beta1.CreateInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder without a merge pass.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for creating a Data Fusion instance.
* </pre>
*
* Protobuf type {@code google.cloud.datafusion.v1beta1.CreateInstanceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datafusion.v1beta1.CreateInstanceRequest)
com.google.cloud.datafusion.v1beta1.CreateInstanceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_CreateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.class,
com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.Builder.class);
}
// Construct using com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInstanceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
instanceId_ = "";
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_CreateInstanceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datafusion.v1beta1.CreateInstanceRequest getDefaultInstanceForType() {
return com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.getDefaultInstance();
}
    @java.lang.Override
    public com.google.cloud.datafusion.v1beta1.CreateInstanceRequest build() {
      com.google.cloud.datafusion.v1beta1.CreateInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.datafusion.v1beta1.CreateInstanceRequest buildPartial() {
      com.google.cloud.datafusion.v1beta1.CreateInstanceRequest result =
          new com.google.cloud.datafusion.v1beta1.CreateInstanceRequest(this);
      // Only copy field state when at least one field was touched in the builder.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields from the builder into the message, translating builder
    // presence bits into the message's (differently numbered) presence bits.
    private void buildPartial0(com.google.cloud.datafusion.v1beta1.CreateInstanceRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.instanceId_ = instanceId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        // Builder bit 2 (instance) maps to message presence bit 0.
        result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datafusion.v1beta1.CreateInstanceRequest) {
return mergeFrom((com.google.cloud.datafusion.v1beta1.CreateInstanceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datafusion.v1beta1.CreateInstanceRequest other) {
if (other == com.google.cloud.datafusion.v1beta1.CreateInstanceRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getInstanceId().isEmpty()) {
instanceId_ = other.instanceId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasInstance()) {
mergeInstance(other.getInstance());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each tag encodes (field_number << 3) | wire_type; 0 means end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (parent), length-delimited: tag = (1 << 3) | 2.
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (instance_id), length-delimited: tag = (2 << 3) | 2.
                instanceId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // Field 3 (instance), length-delimited: tag = (3 << 3) | 2.
                input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unknown fields are preserved; a false return signals an
                // end-group tag, which terminates parsing at this level.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify listeners even on failure, since fields may have been set.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The instance's project and location in the format
* projects/{project}/locations/{location}.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The instance's project and location in the format
* projects/{project}/locations/{location}.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The instance's project and location in the format
* projects/{project}/locations/{location}.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The instance's project and location in the format
* projects/{project}/locations/{location}.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The instance's project and location in the format
* projects/{project}/locations/{location}.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object instanceId_ = "";
/**
*
*
* <pre>
* Required. The name of the instance to create.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The instanceId.
*/
public java.lang.String getInstanceId() {
java.lang.Object ref = instanceId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
instanceId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the instance to create.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for instanceId.
*/
public com.google.protobuf.ByteString getInstanceIdBytes() {
java.lang.Object ref = instanceId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
instanceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the instance to create.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the instance to create.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearInstanceId() {
instanceId_ = getDefaultInstance().getInstanceId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the instance to create.
* </pre>
*
* <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for instanceId to set.
* @return This builder for chaining.
*/
public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
instanceId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.datafusion.v1beta1.Instance instance_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datafusion.v1beta1.Instance,
com.google.cloud.datafusion.v1beta1.Instance.Builder,
com.google.cloud.datafusion.v1beta1.InstanceOrBuilder>
instanceBuilder_;
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*
* @return Whether the instance field is set.
*/
public boolean hasInstance() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*
* @return The instance.
*/
public com.google.cloud.datafusion.v1beta1.Instance getInstance() {
if (instanceBuilder_ == null) {
return instance_ == null
? com.google.cloud.datafusion.v1beta1.Instance.getDefaultInstance()
: instance_;
} else {
return instanceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
public Builder setInstance(com.google.cloud.datafusion.v1beta1.Instance value) {
if (instanceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
instance_ = value;
} else {
instanceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
public Builder setInstance(
com.google.cloud.datafusion.v1beta1.Instance.Builder builderForValue) {
if (instanceBuilder_ == null) {
instance_ = builderForValue.build();
} else {
instanceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
    public Builder mergeInstance(com.google.cloud.datafusion.v1beta1.Instance value) {
      if (instanceBuilder_ == null) {
        // Merge field-by-field only when a non-default instance is already set;
        // otherwise a plain replace is equivalent and cheaper. The comparison
        // against getDefaultInstance() is an intentional reference check.
        if (((bitField0_ & 0x00000004) != 0)
            && instance_ != null
            && instance_ != com.google.cloud.datafusion.v1beta1.Instance.getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        instanceBuilder_.mergeFrom(value);
      }
      // Set the presence bit only if a message actually ended up in the field
      // (guards against a null value passed through the replace path).
      if (instance_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
public Builder clearInstance() {
bitField0_ = (bitField0_ & ~0x00000004);
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
public com.google.cloud.datafusion.v1beta1.Instance.Builder getInstanceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getInstanceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
public com.google.cloud.datafusion.v1beta1.InstanceOrBuilder getInstanceOrBuilder() {
if (instanceBuilder_ != null) {
return instanceBuilder_.getMessageOrBuilder();
} else {
return instance_ == null
? com.google.cloud.datafusion.v1beta1.Instance.getDefaultInstance()
: instance_;
}
}
/**
*
*
* <pre>
* An instance resource.
* </pre>
*
* <code>.google.cloud.datafusion.v1beta1.Instance instance = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datafusion.v1beta1.Instance,
com.google.cloud.datafusion.v1beta1.Instance.Builder,
com.google.cloud.datafusion.v1beta1.InstanceOrBuilder>
getInstanceFieldBuilder() {
if (instanceBuilder_ == null) {
instanceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datafusion.v1beta1.Instance,
com.google.cloud.datafusion.v1beta1.Instance.Builder,
com.google.cloud.datafusion.v1beta1.InstanceOrBuilder>(
getInstance(), getParentForChildren(), isClean());
instance_ = null;
}
return instanceBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datafusion.v1beta1.CreateInstanceRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.datafusion.v1beta1.CreateInstanceRequest)
  // Shared immutable default instance; also serves as the identity for
  // toBuilder()'s fast path.
  private static final com.google.cloud.datafusion.v1beta1.CreateInstanceRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datafusion.v1beta1.CreateInstanceRequest();
  }
  public static com.google.cloud.datafusion.v1beta1.CreateInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; delegates to the builder's
  // streaming mergeFrom and attaches the partial message to any parse error.
  private static final com.google.protobuf.Parser<CreateInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateInstanceRequest>() {
        @java.lang.Override
        public CreateInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateInstanceRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateInstanceRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datafusion.v1beta1.CreateInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigateway/v1/apigateway.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigateway.v1;
/**
*
*
* <pre>
* Request message for ApiGatewayService.UpdateGateway
* </pre>
*
* Protobuf type {@code google.cloud.apigateway.v1.UpdateGatewayRequest}
*/
public final class UpdateGatewayRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigateway.v1.UpdateGatewayRequest)
UpdateGatewayRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateGatewayRequest.newBuilder() to construct.
private UpdateGatewayRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateGatewayRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateGatewayRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigateway.v1.UpdateGatewayRequest.class,
com.google.cloud.apigateway.v1.UpdateGatewayRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int GATEWAY_FIELD_NUMBER = 2;
private com.google.cloud.apigateway.v1.Gateway gateway_;
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the gateway field is set.
*/
@java.lang.Override
public boolean hasGateway() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The gateway.
*/
@java.lang.Override
public com.google.cloud.apigateway.v1.Gateway getGateway() {
return gateway_ == null
? com.google.cloud.apigateway.v1.Gateway.getDefaultInstance()
: gateway_;
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.apigateway.v1.GatewayOrBuilder getGatewayOrBuilder() {
return gateway_ == null
? com.google.cloud.apigateway.v1.Gateway.getDefaultInstance()
: gateway_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Both fields are messages: serialized only when explicitly set
    // (update_mask -> presence bit 0, gateway -> presence bit 1).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getGateway());
    }
    // Round-trip any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 marks "not yet computed". Mirrors the field order in writeTo().
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getGateway());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apigateway.v1.UpdateGatewayRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.apigateway.v1.UpdateGatewayRequest other =
        (com.google.cloud.apigateway.v1.UpdateGatewayRequest) obj;
    // Value equality over both message fields (presence must match before
    // contents are compared) and any unknown fields carried through parsing.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasGateway() != other.hasGateway()) return false;
    if (hasGateway()) {
      if (!getGateway().equals(other.getGateway())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protoc mixing scheme (41/19/37/53/29 multipliers); only present fields contribute,
    // consistent with equals().
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasGateway()) {
      hash = (37 * hash) + GATEWAY_FIELD_NUMBER;
      hash = (53 * hash) + getGateway().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.apigateway.v1.UpdateGatewayRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ApiGatewayService.UpdateGateway
* </pre>
*
* Protobuf type {@code google.cloud.apigateway.v1.UpdateGatewayRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigateway.v1.UpdateGatewayRequest)
com.google.cloud.apigateway.v1.UpdateGatewayRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigateway.v1.UpdateGatewayRequest.class,
com.google.cloud.apigateway.v1.UpdateGatewayRequest.Builder.class);
}
// Construct using com.google.cloud.apigateway.v1.UpdateGatewayRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getGatewayFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
gateway_ = null;
if (gatewayBuilder_ != null) {
gatewayBuilder_.dispose();
gatewayBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_UpdateGatewayRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.apigateway.v1.UpdateGatewayRequest getDefaultInstanceForType() {
return com.google.cloud.apigateway.v1.UpdateGatewayRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apigateway.v1.UpdateGatewayRequest build() {
com.google.cloud.apigateway.v1.UpdateGatewayRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apigateway.v1.UpdateGatewayRequest buildPartial() {
com.google.cloud.apigateway.v1.UpdateGatewayRequest result =
new com.google.cloud.apigateway.v1.UpdateGatewayRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.apigateway.v1.UpdateGatewayRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.gateway_ = gatewayBuilder_ == null ? gateway_ : gatewayBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apigateway.v1.UpdateGatewayRequest) {
return mergeFrom((com.google.cloud.apigateway.v1.UpdateGatewayRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apigateway.v1.UpdateGatewayRequest other) {
if (other == com.google.cloud.apigateway.v1.UpdateGatewayRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasGateway()) {
mergeGateway(other.getGateway());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getGatewayFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Field mask is used to specify the fields to be overwritten in the
* Gateway resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.apigateway.v1.Gateway gateway_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigateway.v1.Gateway,
com.google.cloud.apigateway.v1.Gateway.Builder,
com.google.cloud.apigateway.v1.GatewayOrBuilder>
gatewayBuilder_;
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the gateway field is set.
*/
public boolean hasGateway() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The gateway.
*/
public com.google.cloud.apigateway.v1.Gateway getGateway() {
if (gatewayBuilder_ == null) {
return gateway_ == null
? com.google.cloud.apigateway.v1.Gateway.getDefaultInstance()
: gateway_;
} else {
return gatewayBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGateway(com.google.cloud.apigateway.v1.Gateway value) {
if (gatewayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
gateway_ = value;
} else {
gatewayBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGateway(com.google.cloud.apigateway.v1.Gateway.Builder builderForValue) {
if (gatewayBuilder_ == null) {
gateway_ = builderForValue.build();
} else {
gatewayBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeGateway(com.google.cloud.apigateway.v1.Gateway value) {
if (gatewayBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& gateway_ != null
&& gateway_ != com.google.cloud.apigateway.v1.Gateway.getDefaultInstance()) {
getGatewayBuilder().mergeFrom(value);
} else {
gateway_ = value;
}
} else {
gatewayBuilder_.mergeFrom(value);
}
if (gateway_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGateway() {
bitField0_ = (bitField0_ & ~0x00000002);
gateway_ = null;
if (gatewayBuilder_ != null) {
gatewayBuilder_.dispose();
gatewayBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.apigateway.v1.Gateway.Builder getGatewayBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getGatewayFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.apigateway.v1.GatewayOrBuilder getGatewayOrBuilder() {
if (gatewayBuilder_ != null) {
return gatewayBuilder_.getMessageOrBuilder();
} else {
return gateway_ == null
? com.google.cloud.apigateway.v1.Gateway.getDefaultInstance()
: gateway_;
}
}
/**
*
*
* <pre>
* Required. Gateway resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.Gateway gateway = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigateway.v1.Gateway,
com.google.cloud.apigateway.v1.Gateway.Builder,
com.google.cloud.apigateway.v1.GatewayOrBuilder>
getGatewayFieldBuilder() {
if (gatewayBuilder_ == null) {
gatewayBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigateway.v1.Gateway,
com.google.cloud.apigateway.v1.Gateway.Builder,
com.google.cloud.apigateway.v1.GatewayOrBuilder>(
getGateway(), getParentForChildren(), isClean());
gateway_ = null;
}
return gatewayBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigateway.v1.UpdateGatewayRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigateway.v1.UpdateGatewayRequest)
private static final com.google.cloud.apigateway.v1.UpdateGatewayRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigateway.v1.UpdateGatewayRequest();
}
public static com.google.cloud.apigateway.v1.UpdateGatewayRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateGatewayRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateGatewayRequest>() {
@java.lang.Override
public UpdateGatewayRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateGatewayRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateGatewayRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apigateway.v1.UpdateGatewayRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/hudi | 35,331 | hudi-hadoop-common/src/main/java/org/apache/hudi/hadoop/fs/HoodieWrapperFileSystem.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hudi.hadoop.fs;
import org.apache.hudi.common.fs.ConsistencyGuard;
import org.apache.hudi.common.fs.NoOpConsistencyGuard;
import org.apache.hudi.common.metrics.Registry;
import org.apache.hudi.common.util.HoodieTimer;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.storage.StorageSchemes;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeoutException;
import static org.apache.hudi.hadoop.fs.HadoopFSUtils.convertToHadoopPath;
import static org.apache.hudi.hadoop.fs.HadoopFSUtils.convertToStoragePath;
/**
* HoodieWrapperFileSystem wraps the default file system. It holds state about the open streams in the file system to
* support getting the written size to each of the open streams.
*/
public class HoodieWrapperFileSystem extends FileSystem {
  // Scheme marker for paths routed through this wrapper, e.g. "hoodie-hdfs://...".
  public static final String HOODIE_SCHEME_PREFIX = "hoodie-";
  private static final String TMP_PATH_POSTFIX = ".tmp";
  // Hudi metadata folder name; operations under it are metered on the meta registry.
  private static final String METAFOLDER_NAME = ".hoodie";
/**
* Names for metrics.
*/
  protected enum MetricName {
    // Lowercase on purpose: name() is used verbatim as the metric key (see executeFuncWithTimeMetrics).
    create, rename, delete, listStatus, mkdirs, getFileStatus, globStatus, listFiles, read, write
  }
  // Installed via setMetricsRegistry(); may remain null, in which case metrics are skipped.
  private static Registry METRICS_REGISTRY_DATA;
  private static Registry METRICS_REGISTRY_META;
  /**
   * Installs the registries used to record file-system metrics.
   *
   * @param registry     registry for data-path operations
   * @param registryMeta registry for operations under the {@code .hoodie} metafolder
   */
  public static void setMetricsRegistry(Registry registry, Registry registryMeta) {
    METRICS_REGISTRY_DATA = registry;
    METRICS_REGISTRY_META = registryMeta;
  }
  // Currently-open output streams, keyed by file name; supports querying bytes written per stream.
  private final ConcurrentMap<String, SizeAwareFSDataOutputStream> openStreams = new ConcurrentHashMap<>();
  // The decorated file system; set in the constructor or in initialize().
  private FileSystem fileSystem;
  private URI uri;
  // Visibility guard applied on stream close; the no-op default does nothing.
  private ConsistencyGuard consistencyGuard = new NoOpConsistencyGuard();
  /**
   * A supplier that may throw {@link IOException}; used to wrap file-system calls
   * so they can be metered by {@link #executeFuncWithTimeMetrics}.
   *
   * @param <R> Type of return value.
   */
  @FunctionalInterface
  public interface CheckedFunction<R> {
    R get() throws IOException;
  }
private static Registry getMetricRegistryForPath(Path p) {
return ((p != null) && (p.toString().contains(METAFOLDER_NAME)))
? METRICS_REGISTRY_META : METRICS_REGISTRY_DATA;
}
protected static <R> R executeFuncWithTimeMetrics(String metricName, Path p, CheckedFunction<R> func) throws IOException {
HoodieTimer timer = HoodieTimer.start();
R res = func.get();
Registry registry = getMetricRegistryForPath(p);
if (registry != null) {
registry.increment(metricName);
registry.add(metricName + ".totalDuration", timer.endTimer());
}
return res;
}
protected static <R> R executeFuncWithTimeAndByteMetrics(String metricName, Path p, long byteCount,
CheckedFunction<R> func) throws IOException {
Registry registry = getMetricRegistryForPath(p);
if (registry != null) {
registry.add(metricName + ".totalBytes", byteCount);
}
return executeFuncWithTimeMetrics(metricName, p, func);
}
  // No-arg constructor; presumably required for reflective instantiation by Hadoop's
  // FileSystem loader, with state supplied later via initialize() — TODO confirm.
  public HoodieWrapperFileSystem() {
  }
  /**
   * Wraps the given file system, adopting its URI and the supplied consistency guard.
   */
  public HoodieWrapperFileSystem(FileSystem fileSystem, ConsistencyGuard consistencyGuard) {
    this.fileSystem = fileSystem;
    this.uri = fileSystem.getUri();
    this.consistencyGuard = consistencyGuard;
  }
public static Path convertToHoodiePath(StoragePath file, Configuration conf) {
try {
String scheme = HadoopFSUtils.getFs(file.toString(), conf).getScheme();
return convertPathWithScheme(convertToHadoopPath(file), getHoodieScheme(scheme));
} catch (HoodieIOException e) {
throw e;
}
}
public static Path convertPathWithScheme(Path oldPath, String newScheme) {
URI oldURI = oldPath.toUri();
URI newURI;
try {
newURI = new URI(newScheme,
oldURI.getAuthority(),
oldURI.getPath(),
oldURI.getQuery(),
oldURI.getFragment());
return new CachingPath(newURI);
} catch (URISyntaxException e) {
// TODO - Better Exception handling
throw new RuntimeException(e);
}
}
public static String getHoodieScheme(String scheme) {
String newScheme;
if (StorageSchemes.isSchemeSupported(scheme)) {
newScheme = HOODIE_SCHEME_PREFIX + scheme;
} else {
throw new IllegalArgumentException("BlockAlignedAvroParquetWriter does not support scheme " + scheme);
}
return newScheme;
}
@Override
public void initialize(URI uri, Configuration conf) {
// Get the default filesystem to decorate
Path path = new Path(uri);
// Remove 'hoodie-' prefix from path
if (path.toString().startsWith(HOODIE_SCHEME_PREFIX)) {
path = new Path(path.toString().replace(HOODIE_SCHEME_PREFIX, ""));
this.uri = path.toUri();
} else {
this.uri = uri;
}
this.fileSystem = HadoopFSUtils.getFs(path.toString(), conf);
// Do not need to explicitly initialize the default filesystem, its done already in the above
// FileSystem.get
// fileSystem.initialize(FileSystem.getDefaultUri(conf), conf);
// fileSystem.setConf(conf);
}
  @Override
  public URI getUri() {
    // URI of the underlying (unwrapped) file system, captured in the constructor or initialize().
    return uri;
  }
  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    // Delegate to the wrapped FS; the returned stream is instrumented for read timing.
    return wrapInputStream(f, fileSystem.open(convertToDefaultPath(f), bufferSize));
  }
@Override
public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize,
short replication, long blockSize, Progressable progress) throws IOException {
return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
final Path translatedPath = convertToDefaultPath(f);
return wrapOutputStream(f,
fileSystem.create(translatedPath, permission, overwrite, bufferSize, replication, blockSize, progress));
});
}
  /**
   * Wraps an output stream so written bytes are tracked and the stream is removed from
   * {@link #openStreams} when closed; already-wrapped streams are returned as-is.
   */
  private FSDataOutputStream wrapOutputStream(final Path path, FSDataOutputStream fsDataOutputStream)
      throws IOException {
    if (fsDataOutputStream instanceof SizeAwareFSDataOutputStream) {
      return fsDataOutputStream;
    }
    SizeAwareFSDataOutputStream os = new SizeAwareFSDataOutputStream(path, fsDataOutputStream, consistencyGuard,
        () -> openStreams.remove(path.getName()));
    // NOTE(review): keyed by file NAME only, not the full path — two open files with the same
    // name in different directories would collide in this map. TODO confirm this is intended.
    openStreams.put(path.getName(), os);
    return os;
  }
private FSDataInputStream wrapInputStream(final Path path, FSDataInputStream fsDataInputStream) throws IOException {
if (fsDataInputStream instanceof TimedFSDataInputStream) {
return fsDataInputStream;
}
return new TimedFSDataInputStream(path, fsDataInputStream);
}
  // -------------------------------------------------------------------------------------------
  // create()/append() overloads. Each one: (1) translates the path to the default scheme,
  // (2) delegates to the wrapped FileSystem, (3) wraps the resulting stream so written bytes are
  // tracked (see wrapOutputStream), and (4) records a timed "create" metric for the call.
  // -------------------------------------------------------------------------------------------
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), overwrite));
    });
  }
  @Override
  public FSDataOutputStream create(Path f) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f)));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, Progressable progress) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), progress));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, short replication) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), replication));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, short replication, Progressable progress) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), replication, progress));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), overwrite, bufferSize));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize, Progressable progress)
      throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), overwrite, bufferSize, progress));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize, short replication, long blockSize,
      Progressable progress) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f,
          fileSystem.create(convertToDefaultPath(f), overwrite, bufferSize, replication, blockSize, progress));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, FsPermission permission, EnumSet<CreateFlag> flags, int bufferSize,
      short replication, long blockSize, Progressable progress) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f,
          fileSystem.create(convertToDefaultPath(f), permission, flags, bufferSize, replication, blockSize, progress));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, FsPermission permission, EnumSet<CreateFlag> flags, int bufferSize,
      short replication, long blockSize, Progressable progress, Options.ChecksumOpt checksumOpt) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f, fileSystem.create(convertToDefaultPath(f), permission, flags, bufferSize, replication,
          blockSize, progress, checksumOpt));
    });
  }
  @Override
  public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize, short replication, long blockSize)
      throws IOException {
    return executeFuncWithTimeMetrics(MetricName.create.name(), f, () -> {
      return wrapOutputStream(f,
          fileSystem.create(convertToDefaultPath(f), overwrite, bufferSize, replication, blockSize));
    });
  }
  // NOTE(review): unlike the create() overloads above, this append() overload is not wrapped in
  // executeFuncWithTimeMetrics, so no metric is recorded for it — TODO confirm intentional.
  @Override
  public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException {
    return wrapOutputStream(f, fileSystem.append(convertToDefaultPath(f), bufferSize, progress));
  }
  /**
   * Renames {@code src} to {@code dst} on the wrapped file system, using the consistency guard to
   * cope with eventually-consistent stores: first waits for the source to be visible, performs the
   * rename, and on success waits for the destination to appear and the source to disappear.
   * Records a timed "rename" metric; a guard timeout surfaces as a {@link HoodieException}.
   */
  @Override
  public boolean rename(Path src, Path dst) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.rename.name(), src, () -> {
      // The source must be visible before we attempt the rename.
      try {
        consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(src));
      } catch (TimeoutException e) {
        throw new HoodieException("Timed out waiting for " + src + " to appear", e);
      }
      boolean success = fileSystem.rename(convertToDefaultPath(src), convertToDefaultPath(dst));
      if (success) {
        // After a successful rename, wait until both sides of the move are observable.
        try {
          consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
        } catch (TimeoutException e) {
          throw new HoodieException("Timed out waiting for " + dst + " to appear", e);
        }
        try {
          consistencyGuard.waitTillFileDisappears(convertToDefaultStoragePath(src));
        } catch (TimeoutException e) {
          throw new HoodieException("Timed out waiting for " + src + " to disappear", e);
        }
      }
      return success;
    });
  }
  /**
   * Deletes {@code f} (optionally recursively) on the wrapped file system, then on success waits
   * via the consistency guard until the path is no longer visible. Records a timed "delete"
   * metric; a guard timeout surfaces as a {@link HoodieException}.
   */
  @Override
  public boolean delete(Path f, boolean recursive) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.delete.name(), f, () -> {
      boolean success = fileSystem.delete(convertToDefaultPath(f), recursive);
      if (success) {
        try {
          // NOTE(review): uses convertToStoragePath(f) on the raw path, whereas sibling methods
          // pass convertToDefaultStoragePath(f) to the guard — TODO confirm this is intentional.
          consistencyGuard.waitTillFileDisappears(convertToStoragePath(f));
        } catch (TimeoutException e) {
          throw new HoodieException("Timed out waiting for " + f + " to disappear", e);
        }
      }
      return success;
    });
  }
  /** Lists the directory contents of {@code f}, recording a timed "listStatus" metric. */
  @Override
  public FileStatus[] listStatus(Path f) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.listStatus.name(), f, () -> {
      return fileSystem.listStatus(convertToDefaultPath(f));
    });
  }
  /** Returns the working directory, translated back to the hoodie scheme. */
  @Override
  public Path getWorkingDirectory() {
    return convertToHoodiePath(fileSystem.getWorkingDirectory());
  }
  @Override
  public void setWorkingDirectory(Path newDir) {
    fileSystem.setWorkingDirectory(convertToDefaultPath(newDir));
  }
  /**
   * Creates the directory (with the given permission) and, on success, waits via the consistency
   * guard until it is visible. Records a timed "mkdirs" metric.
   */
  @Override
  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.mkdirs.name(), f, () -> {
      boolean success = fileSystem.mkdirs(convertToDefaultPath(f), permission);
      if (success) {
        try {
          consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(f));
        } catch (TimeoutException e) {
          throw new HoodieException("Timed out waiting for directory " + f + " to appear", e);
        }
      }
      return success;
    });
  }
  /**
   * Fetches the file status, first giving the consistency guard a chance to see the file appear.
   * A guard timeout here is deliberately ignored (best-effort): the delegated getFileStatus call
   * below will throw FileNotFoundException itself if the file truly is not there.
   */
  @Override
  public FileStatus getFileStatus(Path f) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.getFileStatus.name(), f, () -> {
      try {
        consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(f));
      } catch (TimeoutException e) {
        // pass
      }
      return fileSystem.getFileStatus(convertToDefaultPath(f));
    });
  }
  // -------------------------------------------------------------------------------------------
  // Plain pass-through delegates: no metrics, no consistency guard. Paths going down are
  // translated to the default scheme; paths coming back are translated to the hoodie scheme.
  // -------------------------------------------------------------------------------------------
  /** Returns the scheme of this wrapper's own URI (not the wrapped file system's scheme). */
  @Override
  public String getScheme() {
    return uri.getScheme();
  }
  @Override
  public String getCanonicalServiceName() {
    return fileSystem.getCanonicalServiceName();
  }
  @Override
  public String getName() {
    return fileSystem.getName();
  }
  @Override
  public Path makeQualified(Path path) {
    return convertToHoodiePath(fileSystem.makeQualified(convertToDefaultPath(path)));
  }
  @Override
  public Token<?> getDelegationToken(String renewer) throws IOException {
    return fileSystem.getDelegationToken(renewer);
  }
  @Override
  public Token<?>[] addDelegationTokens(String renewer, Credentials credentials) throws IOException {
    return fileSystem.addDelegationTokens(renewer, credentials);
  }
  @Override
  public FileSystem[] getChildFileSystems() {
    return fileSystem.getChildFileSystems();
  }
  @Override
  public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len) throws IOException {
    return fileSystem.getFileBlockLocations(file, start, len);
  }
  @Override
  public BlockLocation[] getFileBlockLocations(Path p, long start, long len) throws IOException {
    return fileSystem.getFileBlockLocations(convertToDefaultPath(p), start, len);
  }
  @Override
  public FsServerDefaults getServerDefaults() throws IOException {
    return fileSystem.getServerDefaults();
  }
  @Override
  public FsServerDefaults getServerDefaults(Path p) throws IOException {
    return fileSystem.getServerDefaults(convertToDefaultPath(p));
  }
  @Override
  public Path resolvePath(Path p) throws IOException {
    return convertToHoodiePath(fileSystem.resolvePath(convertToDefaultPath(p)));
  }
  /** Opens {@code f} for reading, instrumenting the stream for read-latency metrics. */
  @Override
  public FSDataInputStream open(Path f) throws IOException {
    return wrapInputStream(f, fileSystem.open(convertToDefaultPath(f)));
  }
  // createNonRecursive() overloads: delegate with a translated path and wrap the stream for
  // byte tracking. Unlike the create() overloads, these record no metric.
  @Override
  public FSDataOutputStream createNonRecursive(Path f, boolean overwrite, int bufferSize, short replication,
      long blockSize, Progressable progress) throws IOException {
    Path p = convertToDefaultPath(f);
    return wrapOutputStream(p,
        fileSystem.createNonRecursive(p, overwrite, bufferSize, replication, blockSize, progress));
  }
  @Override
  public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, boolean overwrite, int bufferSize,
      short replication, long blockSize, Progressable progress) throws IOException {
    Path p = convertToDefaultPath(f);
    return wrapOutputStream(p,
        fileSystem.createNonRecursive(p, permission, overwrite, bufferSize, replication, blockSize, progress));
  }
  @Override
  public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, EnumSet<CreateFlag> flags,
      int bufferSize, short replication, long blockSize, Progressable progress) throws IOException {
    Path p = convertToDefaultPath(f);
    return wrapOutputStream(p,
        fileSystem.createNonRecursive(p, permission, flags, bufferSize, replication, blockSize, progress));
  }
  /**
   * Atomically creates a new, empty file and, if one was created, waits via the consistency guard
   * until it is visible. A guard timeout surfaces as a {@link HoodieException}.
   */
  @Override
  public boolean createNewFile(Path f) throws IOException {
    boolean newFile = fileSystem.createNewFile(convertToDefaultPath(f));
    if (newFile) {
      try {
        consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(f));
      } catch (TimeoutException e) {
        throw new HoodieException("Timed out waiting for " + f + " to appear", e);
      }
    }
    return newFile;
  }
  /** Appends to {@code f}; the returned stream is wrapped for byte tracking. */
  @Override
  public FSDataOutputStream append(Path f) throws IOException {
    return wrapOutputStream(f, fileSystem.append(convertToDefaultPath(f)));
  }
  @Override
  public FSDataOutputStream append(Path f, int bufferSize) throws IOException {
    return wrapOutputStream(f, fileSystem.append(convertToDefaultPath(f), bufferSize));
  }
  /**
   * Concatenates the given source files into {@code trg}, then waits via the consistency guard
   * until the target is visible.
   */
  @Override
  public void concat(Path trg, Path[] psrcs) throws IOException {
    Path[] psrcsNew = convertDefaults(psrcs);
    fileSystem.concat(convertToDefaultPath(trg), psrcsNew);
    try {
      consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(trg));
    } catch (TimeoutException e) {
      throw new HoodieException("Timed out waiting for " + trg + " to appear", e);
    }
  }
  @Override
  public short getReplication(Path src) throws IOException {
    return fileSystem.getReplication(convertToDefaultPath(src));
  }
  @Override
  public boolean setReplication(Path src, short replication) throws IOException {
    return fileSystem.setReplication(convertToDefaultPath(src), replication);
  }
@Override
public boolean delete(Path f) throws IOException {
return executeFuncWithTimeMetrics(MetricName.delete.name(), f, () -> {
return delete(f, true);
});
}
  // Plain pass-through delegates with path translation; no metrics or consistency guard.
  @Override
  public boolean deleteOnExit(Path f) throws IOException {
    return fileSystem.deleteOnExit(convertToDefaultPath(f));
  }
  @Override
  public boolean cancelDeleteOnExit(Path f) {
    return fileSystem.cancelDeleteOnExit(convertToDefaultPath(f));
  }
  @Override
  public boolean exists(Path f) throws IOException {
    return fileSystem.exists(convertToDefaultPath(f));
  }
  @Override
  public boolean isDirectory(Path f) throws IOException {
    return fileSystem.isDirectory(convertToDefaultPath(f));
  }
  @Override
  public boolean isFile(Path f) throws IOException {
    return fileSystem.isFile(convertToDefaultPath(f));
  }
  @Override
  public long getLength(Path f) throws IOException {
    return fileSystem.getLength(convertToDefaultPath(f));
  }
  @Override
  public ContentSummary getContentSummary(Path f) throws IOException {
    return fileSystem.getContentSummary(convertToDefaultPath(f));
  }
  @Override
  public RemoteIterator<Path> listCorruptFileBlocks(Path path) throws IOException {
    return fileSystem.listCorruptFileBlocks(convertToDefaultPath(path));
  }
  // Listing/glob delegates; the multi-path variants attribute the metric to the first path.
  @Override
  public FileStatus[] listStatus(Path f, PathFilter filter) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.listStatus.name(), f, () -> {
      return fileSystem.listStatus(convertToDefaultPath(f), filter);
    });
  }
  // NOTE(review): with an empty array a null path is passed to executeFuncWithTimeMetrics —
  // presumably that helper tolerates null; TODO confirm.
  @Override
  public FileStatus[] listStatus(Path[] files) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.listStatus.name(), files.length > 0 ? files[0] : null, () -> {
      return fileSystem.listStatus(convertDefaults(files));
    });
  }
  @Override
  public FileStatus[] listStatus(Path[] files, PathFilter filter) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.listStatus.name(), files.length > 0 ? files[0] : null, () -> {
      return fileSystem.listStatus(convertDefaults(files), filter);
    });
  }
  @Override
  public FileStatus[] globStatus(Path pathPattern) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.globStatus.name(), pathPattern, () -> {
      return fileSystem.globStatus(convertToDefaultPath(pathPattern));
    });
  }
  @Override
  public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.globStatus.name(), pathPattern, () -> {
      return fileSystem.globStatus(convertToDefaultPath(pathPattern), filter);
    });
  }
  // Not metric-wrapped, unlike the listings above.
  @Override
  public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f) throws IOException {
    return fileSystem.listLocatedStatus(convertToDefaultPath(f));
  }
  @Override
  public RemoteIterator<LocatedFileStatus> listFiles(Path f, boolean recursive) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.listFiles.name(), f, () -> {
      return fileSystem.listFiles(convertToDefaultPath(f), recursive);
    });
  }
  /** Returns the home directory translated back to the hoodie scheme. */
  @Override
  public Path getHomeDirectory() {
    return convertToHoodiePath(fileSystem.getHomeDirectory());
  }
  /**
   * Creates the directory (default permissions) and, on success, waits via the consistency guard
   * until it is visible. Records a timed "mkdirs" metric; a guard timeout surfaces as a
   * {@link HoodieException}.
   */
  @Override
  public boolean mkdirs(Path f) throws IOException {
    return executeFuncWithTimeMetrics(MetricName.mkdirs.name(), f, () -> {
      boolean success = fileSystem.mkdirs(convertToDefaultPath(f));
      if (success) {
        try {
          consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(f));
        } catch (TimeoutException e) {
          throw new HoodieException("Timed out waiting for directory " + f + " to appear", e);
        }
      }
      return success;
    });
  }
@Override
public void copyFromLocalFile(Path src, Path dst) throws IOException {
fileSystem.copyFromLocalFile(convertToLocalPath(src), convertToDefaultPath(dst));
try {
consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
} catch (TimeoutException e) {
throw new HoodieException("Timed out waiting for destination " + dst + " to appear", e);
}
}
@Override
public void moveFromLocalFile(Path[] srcs, Path dst) throws IOException {
fileSystem.moveFromLocalFile(convertLocalPaths(srcs), convertToDefaultPath(dst));
try {
consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
} catch (TimeoutException e) {
throw new HoodieException("Timed out waiting for destination " + dst + " to appear", e);
}
}
@Override
public void moveFromLocalFile(Path src, Path dst) throws IOException {
fileSystem.moveFromLocalFile(convertToLocalPath(src), convertToDefaultPath(dst));
try {
consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
} catch (TimeoutException e) {
throw new HoodieException("Timed out waiting for destination " + dst + " to appear", e);
}
}
@Override
public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws IOException {
fileSystem.copyFromLocalFile(delSrc, convertToLocalPath(src), convertToDefaultPath(dst));
try {
consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
} catch (TimeoutException e) {
throw new HoodieException("Timed out waiting for destination " + dst + " to appear", e);
}
}
@Override
public void copyFromLocalFile(boolean delSrc, boolean overwrite, Path[] srcs, Path dst) throws IOException {
fileSystem.copyFromLocalFile(delSrc, overwrite, convertLocalPaths(srcs), convertToDefaultPath(dst));
try {
consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
} catch (TimeoutException e) {
throw new HoodieException("Timed out waiting for destination " + dst + " to appear", e);
}
}
@Override
public void copyFromLocalFile(boolean delSrc, boolean overwrite, Path src, Path dst) throws IOException {
fileSystem.copyFromLocalFile(delSrc, overwrite, convertToLocalPath(src), convertToDefaultPath(dst));
try {
consistencyGuard.waitTillFileAppears(convertToDefaultStoragePath(dst));
} catch (TimeoutException e) {
throw new HoodieException("Timed out waiting for destination " + dst + " to appear", e);
}
}
  // Copy/move *to* local: plain delegates — no consistency guard is needed for the local side.
  @Override
  public void copyToLocalFile(Path src, Path dst) throws IOException {
    fileSystem.copyToLocalFile(convertToDefaultPath(src), convertToLocalPath(dst));
  }
  @Override
  public void moveToLocalFile(Path src, Path dst) throws IOException {
    fileSystem.moveToLocalFile(convertToDefaultPath(src), convertToLocalPath(dst));
  }
  @Override
  public void copyToLocalFile(boolean delSrc, Path src, Path dst) throws IOException {
    fileSystem.copyToLocalFile(delSrc, convertToDefaultPath(src), convertToLocalPath(dst));
  }
  @Override
  public void copyToLocalFile(boolean delSrc, Path src, Path dst, boolean useRawLocalFileSystem) throws IOException {
    fileSystem.copyToLocalFile(delSrc, convertToDefaultPath(src), convertToLocalPath(dst), useRawLocalFileSystem);
  }
  @Override
  public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException {
    return convertToHoodiePath(
        fileSystem.startLocalOutput(convertToDefaultPath(fsOutputFile), convertToDefaultPath(tmpLocalFile)));
  }
  @Override
  public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile) throws IOException {
    fileSystem.completeLocalOutput(convertToDefaultPath(fsOutputFile), convertToDefaultPath(tmpLocalFile));
  }
  /** Closes this wrapper only; the underlying shared file system is deliberately left open. */
  @Override
  public void close() throws IOException {
    // Don't close the wrapped `fileSystem` object. This will end up closing it for every thread since it
    // could be cached across jvm. We don't own that object anyway.
    super.close();
  }
  // Plain pass-through delegates with path translation.
  @Override
  public long getUsed() throws IOException {
    return fileSystem.getUsed();
  }
  @Override
  public long getBlockSize(Path f) throws IOException {
    return fileSystem.getBlockSize(convertToDefaultPath(f));
  }
  @Override
  public long getDefaultBlockSize() {
    return fileSystem.getDefaultBlockSize();
  }
  @Override
  public long getDefaultBlockSize(Path f) {
    return fileSystem.getDefaultBlockSize(convertToDefaultPath(f));
  }
  @Override
  public short getDefaultReplication() {
    return fileSystem.getDefaultReplication();
  }
  @Override
  public short getDefaultReplication(Path path) {
    return fileSystem.getDefaultReplication(convertToDefaultPath(path));
  }
  @Override
  public void access(Path path, FsAction mode) throws IOException {
    fileSystem.access(convertToDefaultPath(path), mode);
  }
  @Override
  public void createSymlink(Path target, Path link, boolean createParent) throws IOException {
    fileSystem.createSymlink(convertToDefaultPath(target), convertToDefaultPath(link), createParent);
  }
  @Override
  public FileStatus getFileLinkStatus(Path f) throws IOException {
    return fileSystem.getFileLinkStatus(convertToDefaultPath(f));
  }
  @Override
  public boolean supportsSymlinks() {
    return fileSystem.supportsSymlinks();
  }
  /** Link targets are translated back to the hoodie scheme before being returned. */
  @Override
  public Path getLinkTarget(Path f) throws IOException {
    return convertToHoodiePath(fileSystem.getLinkTarget(convertToDefaultPath(f)));
  }
  // Checksum / status / ownership / snapshot delegates with path translation.
  @Override
  public FileChecksum getFileChecksum(Path f) throws IOException {
    return fileSystem.getFileChecksum(convertToDefaultPath(f));
  }
  @Override
  public FileChecksum getFileChecksum(Path f, long length) throws IOException {
    return fileSystem.getFileChecksum(convertToDefaultPath(f), length);
  }
  @Override
  public void setVerifyChecksum(boolean verifyChecksum) {
    fileSystem.setVerifyChecksum(verifyChecksum);
  }
  @Override
  public void setWriteChecksum(boolean writeChecksum) {
    fileSystem.setWriteChecksum(writeChecksum);
  }
  @Override
  public FsStatus getStatus() throws IOException {
    return fileSystem.getStatus();
  }
  @Override
  public FsStatus getStatus(Path p) throws IOException {
    return fileSystem.getStatus(convertToDefaultPath(p));
  }
  @Override
  public void setPermission(Path p, FsPermission permission) throws IOException {
    fileSystem.setPermission(convertToDefaultPath(p), permission);
  }
  @Override
  public void setOwner(Path p, String username, String groupname) throws IOException {
    fileSystem.setOwner(convertToDefaultPath(p), username, groupname);
  }
  @Override
  public void setTimes(Path p, long mtime, long atime) throws IOException {
    fileSystem.setTimes(convertToDefaultPath(p), mtime, atime);
  }
  /** Snapshot paths returned by the wrapped file system are translated to the hoodie scheme. */
  @Override
  public Path createSnapshot(Path path, String snapshotName) throws IOException {
    return convertToHoodiePath(fileSystem.createSnapshot(convertToDefaultPath(path), snapshotName));
  }
  @Override
  public void renameSnapshot(Path path, String snapshotOldName, String snapshotNewName) throws IOException {
    fileSystem.renameSnapshot(convertToDefaultPath(path), snapshotOldName, snapshotNewName);
  }
  @Override
  public void deleteSnapshot(Path path, String snapshotName) throws IOException {
    fileSystem.deleteSnapshot(convertToDefaultPath(path), snapshotName);
  }
  // ACL and extended-attribute delegates with path translation.
  @Override
  public void modifyAclEntries(Path path, List<AclEntry> aclSpec) throws IOException {
    fileSystem.modifyAclEntries(convertToDefaultPath(path), aclSpec);
  }
  @Override
  public void removeAclEntries(Path path, List<AclEntry> aclSpec) throws IOException {
    fileSystem.removeAclEntries(convertToDefaultPath(path), aclSpec);
  }
  @Override
  public void removeDefaultAcl(Path path) throws IOException {
    fileSystem.removeDefaultAcl(convertToDefaultPath(path));
  }
  @Override
  public void removeAcl(Path path) throws IOException {
    fileSystem.removeAcl(convertToDefaultPath(path));
  }
  @Override
  public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
    fileSystem.setAcl(convertToDefaultPath(path), aclSpec);
  }
  @Override
  public AclStatus getAclStatus(Path path) throws IOException {
    return fileSystem.getAclStatus(convertToDefaultPath(path));
  }
  @Override
  public void setXAttr(Path path, String name, byte[] value) throws IOException {
    fileSystem.setXAttr(convertToDefaultPath(path), name, value);
  }
  @Override
  public void setXAttr(Path path, String name, byte[] value, EnumSet<XAttrSetFlag> flag) throws IOException {
    fileSystem.setXAttr(convertToDefaultPath(path), name, value, flag);
  }
  @Override
  public byte[] getXAttr(Path path, String name) throws IOException {
    return fileSystem.getXAttr(convertToDefaultPath(path), name);
  }
  @Override
  public Map<String, byte[]> getXAttrs(Path path) throws IOException {
    return fileSystem.getXAttrs(convertToDefaultPath(path));
  }
  @Override
  public Map<String, byte[]> getXAttrs(Path path, List<String> names) throws IOException {
    return fileSystem.getXAttrs(convertToDefaultPath(path), names);
  }
  @Override
  public List<String> listXAttrs(Path path) throws IOException {
    return fileSystem.listXAttrs(convertToDefaultPath(path));
  }
  @Override
  public void removeXAttr(Path path, String name) throws IOException {
    fileSystem.removeXAttr(convertToDefaultPath(path), name);
  }
  /** Exposes the wrapped file system's configuration. */
  @Override
  public Configuration getConf() {
    return fileSystem.getConf();
  }
  @Override
  public void setConf(Configuration conf) {
    // ignore this. we will set conf on init
  }
  @Override
  public int hashCode() {
    return fileSystem.hashCode();
  }
  // NOTE(review): equality is delegated entirely to the wrapped file system, so a wrapper compares
  // equal to its raw FileSystem but not vice versa (asymmetric equals) — TODO confirm intended.
  @Override
  public boolean equals(Object obj) {
    return fileSystem.equals(obj);
  }
  @Override
  public String toString() {
    return fileSystem.toString();
  }
  /** Rewrites {@code oldPath} to carry the hoodie-prefixed scheme derived from this FS's scheme. */
  public Path convertToHoodiePath(Path oldPath) {
    return convertPathWithScheme(oldPath, getHoodieScheme(getScheme()));
  }
  /** Rewrites {@code oldPath} to carry this wrapper's default (underlying) scheme. */
  private Path convertToDefaultPath(Path oldPath) {
    return convertPathWithScheme(oldPath, getScheme());
  }
  /** Same as {@link #convertToDefaultPath} but returns a {@code StoragePath} for the guard APIs. */
  private StoragePath convertToDefaultStoragePath(Path oldPath) {
    return convertToStoragePath(convertPathWithScheme(oldPath, getScheme()));
  }
  /** Rewrites {@code oldPath} to the local file system's scheme; IO failures become unchecked. */
  private Path convertToLocalPath(Path oldPath) {
    try {
      return convertPathWithScheme(oldPath, FileSystem.getLocal(getConf()).getScheme());
    } catch (IOException e) {
      throw new HoodieIOException(e.getMessage(), e);
    }
  }
private Path[] convertLocalPaths(Path[] psrcs) {
Path[] psrcsNew = new Path[psrcs.length];
for (int i = 0; i < psrcs.length; i++) {
psrcsNew[i] = convertToLocalPath(psrcs[i]);
}
return psrcsNew;
}
private Path[] convertDefaults(Path[] psrcs) {
Path[] psrcsNew = new Path[psrcs.length];
for (int i = 0; i < psrcs.length; i++) {
psrcsNew[i] = convertToDefaultPath(psrcs[i]);
}
return psrcsNew;
}
public long getBytesWritten(Path file) {
if (openStreams.containsKey(file.getName())) {
return openStreams.get(file.getName()).getBytesWritten();
}
// When the file is first written, we do not have a track of it
throw new IllegalArgumentException(
file + " does not have a open stream. Cannot get the bytes written on the stream");
}
  /** Returns the wrapped (underlying) file system; callers share and must not close it. */
  public FileSystem getFileSystem() {
    return fileSystem;
  }
}
|
apache/solr | 35,531 | solr/core/src/java/org/apache/solr/cluster/placement/plugins/AffinityPlacementFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.cluster.placement.plugins;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.solr.cluster.Cluster;
import org.apache.solr.cluster.Node;
import org.apache.solr.cluster.Replica;
import org.apache.solr.cluster.SolrCollection;
import org.apache.solr.cluster.placement.AttributeFetcher;
import org.apache.solr.cluster.placement.AttributeValues;
import org.apache.solr.cluster.placement.BalanceRequest;
import org.apache.solr.cluster.placement.DeleteCollectionRequest;
import org.apache.solr.cluster.placement.PlacementContext;
import org.apache.solr.cluster.placement.PlacementException;
import org.apache.solr.cluster.placement.PlacementModificationException;
import org.apache.solr.cluster.placement.PlacementPlugin;
import org.apache.solr.cluster.placement.PlacementPluginFactory;
import org.apache.solr.cluster.placement.ReplicaMetric;
import org.apache.solr.cluster.placement.ShardMetrics;
import org.apache.solr.cluster.placement.impl.NodeMetricImpl;
import org.apache.solr.cluster.placement.impl.ReplicaMetricImpl;
import org.apache.solr.common.util.CollectionUtil;
import org.apache.solr.common.util.StrUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This factory is instantiated by config from its class name. Using it is the only way to create
* instances of {@link AffinityPlacementPlugin}.
*
* <p>In order to configure this plugin to be used for placement decisions, the following {@code
* curl} command (or something equivalent) has to be executed once the cluster is already running in
* order to set the appropriate Zookeeper stored configuration. Replace {@code localhost:8983} by
* one of your servers' IP address and port.
*
* <pre>
*
* curl -X POST -H 'Content-type:application/json' -d '{
* "add": {
* "name": ".placement-plugin",
* "class": "org.apache.solr.cluster.placement.plugins.AffinityPlacementFactory",
* "config": {
* "minimalFreeDiskGB": 10,
* "prioritizedFreeDiskGB": 50
* }
* }
* }' http://localhost:8983/api/cluster/plugin
* </pre>
*
* <p>In order to delete the placement-plugin section (and to fallback to either Legacy or rule
* based placement if configured for a collection), execute:
*
* <pre>
*
* curl -X POST -H 'Content-type:application/json' -d '{
* "remove" : ".placement-plugin"
* }' http://localhost:8983/api/cluster/plugin
* </pre>
*
* <p>{@link AffinityPlacementPlugin} implements placing replicas in a way that replicate past
* Autoscaling config defined <a
* href="https://github.com/lucidworks/fusion-cloud-native/blob/master/policy.json#L16">here</a>.
*
* <p>This specification is doing the following:
*
* <p><i>Spread replicas per shard as evenly as possible across multiple availability zones (given
* by a sys prop), assign replicas based on replica type to specific kinds of nodes (another sys
* prop), and avoid having more than one replica per shard on the same node.<br>
* Only after these constraints are satisfied do minimize cores per node or disk usage.</i>
*
* <p>This plugin achieves this by creating a {@link AffinityPlacementPlugin.AffinityNode} that
* weights nodes very high if they are unbalanced with respect to AvailabilityZone and SpreadDomain.
* See {@link AffinityPlacementPlugin.AffinityNode} for more information on how this weighting helps
* the plugin correctly place and balance replicas.
*
* <p>This code is a realistic placement computation, based on a few assumptions. The code is
* written in such a way to make it relatively easy to adapt it to (somewhat) different assumptions.
* Additional configuration options could be introduced to allow configuration base option selection
* as well...
*/
public class AffinityPlacementFactory implements PlacementPluginFactory<AffinityPlacementConfig> {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
AffinityPlacementConfig config = AffinityPlacementConfig.DEFAULT;
/**
* Empty public constructor is used to instantiate this factory. Using a factory pattern to allow
* the factory to do one time costly operations if needed, and to only have to instantiate a
* default constructor class by name, rather than having to call a constructor with more
* parameters (if we were to instantiate the plugin class directly without going through a
* factory).
*/
public AffinityPlacementFactory() {}
  /** Validates the current config and builds a plugin instance from its decoded fields. */
  @Override
  public PlacementPlugin createPluginInstance() {
    config.validate();
    return new AffinityPlacementPlugin(
        config.minimalFreeDiskGB,
        config.prioritizedFreeDiskGB,
        config.withCollection,
        config.withCollectionShards,
        config.collectionNodeType,
        config.spreadAcrossDomains);
  }
  /** Replaces the factory's configuration after validating it; never accepts null. */
  @Override
  public void configure(AffinityPlacementConfig cfg) {
    Objects.requireNonNull(cfg, "configuration must never be null");
    cfg.validate();
    this.config = cfg;
  }
  /** Returns the currently active configuration. */
  @Override
  public AffinityPlacementConfig getConfig() {
    return config;
  }
/**
* See {@link AffinityPlacementFactory} for instructions on how to configure a cluster to use this
* plugin and details on what the plugin does.
*/
public static class AffinityPlacementPlugin extends OrderedNodePlacementPlugin {
private final long minimalFreeDiskGB;
private final long prioritizedFreeDiskGB;
// primary to secondary (1:1)
private final Map<String, String> withCollections;
// same but shardwise
private final Map<String, String> withCollectionShards;
// secondary to primary (1:N) + shard-wise_primary (1:N)
private final Map<String, Set<String>> collocatedWith;
private final Map<String, Set<String>> nodeTypes;
private final boolean spreadAcrossDomains;
    /**
     * The factory has decoded the configuration for the plugin instance and passes it the
     * parameters it needs. Builds the reverse collocation map ({@code collocatedWith}) from both
     * the shard-agnostic and shard-wise primary-to-secondary relations, and parses the per
     * collection node-type strings into sets.
     */
    AffinityPlacementPlugin(
        long minimalFreeDiskGB,
        long prioritizedFreeDiskGB,
        Map<String, String> withCollections,
        Map<String, String> withCollectionShards,
        Map<String, String> collectionNodeTypes,
        boolean spreadAcrossDomains) {
      this.minimalFreeDiskGB = minimalFreeDiskGB;
      this.prioritizedFreeDiskGB = prioritizedFreeDiskGB;
      Objects.requireNonNull(withCollections, "withCollections must not be null");
      Objects.requireNonNull(collectionNodeTypes, "collectionNodeTypes must not be null");
      Objects.requireNonNull(withCollectionShards, "withCollectionShards must not be null");
      this.spreadAcrossDomains = spreadAcrossDomains;
      this.withCollections = withCollections;
      this.withCollectionShards = withCollectionShards;
      Map<String, Set<String>> collocated = new HashMap<>();
      // reverse both relations: shard-agnostic and shard-wise
      // (secondary -> set of primaries that declare it; one secondary can serve many primaries)
      List.of(this.withCollections, this.withCollectionShards)
          .forEach(
              direct ->
                  direct.forEach(
                      (primary, secondary) ->
                          collocated
                              .computeIfAbsent(secondary, s -> new HashSet<>())
                              .add(primary)));
      this.collocatedWith = Collections.unmodifiableMap(collocated);
      if (collectionNodeTypes.isEmpty()) {
        nodeTypes = Map.of();
      } else {
        // Parse comma-separated node-type strings; collections with an empty list are dropped.
        // NOTE(review): unlike collocatedWith, this map is left mutable — TODO confirm intended.
        nodeTypes = new HashMap<>();
        collectionNodeTypes.forEach(
            (coll, typesString) -> {
              List<String> types = StrUtils.splitSmart(typesString, ',', true);
              if (!types.isEmpty()) {
                nodeTypes.put(coll, new HashSet<>(types));
              }
            });
      }
    }
@Override
protected void verifyDeleteCollection(
DeleteCollectionRequest deleteCollectionRequest, PlacementContext placementContext)
throws PlacementModificationException {
Cluster cluster = placementContext.getCluster();
Set<String> collocatedCollections =
collocatedWith.getOrDefault(deleteCollectionRequest.getCollection().getName(), Set.of());
for (String primaryName : collocatedCollections) {
try {
if (cluster.getCollection(primaryName) != null) {
// still exists
throw new PlacementModificationException(
"collocated collection "
+ primaryName
+ " of "
+ deleteCollectionRequest.getCollection().getName()
+ " still present");
}
} catch (IOException e) {
throw new PlacementModificationException(
"failed to retrieve collocated collection information", e);
}
}
}
  /**
   * AffinityPlacementContext is used to share information across {@link AffinityNode} instances.
   *
   * <p>For instance, with SpreadDomains and AvailabilityZones, the weighting of a Node requires
   * information on the contents of other Nodes. This class is how that information is shared.
   *
   * <p>One AffinityPlacementContext is used for each call to {@link
   * #computePlacements(Collection, PlacementContext)} or {@link #computeBalancing(BalanceRequest,
   * PlacementContext)}. The state of the context will be altered throughout the computation.
   */
  private static final class AffinityPlacementContext {
    // Every spread domain observed on any node during this computation.
    private final Set<String> allSpreadDomains = new HashSet<>();
    // collection name -> shard name -> replica spread over spread domains.
    private final Map<String, Map<String, ReplicaSpread>> spreadDomainUsage = new HashMap<>();
    // Every availability zone observed on any node during this computation.
    private final Set<String> allAvailabilityZones = new HashSet<>();
    // collection name -> shard name -> replica type -> replica spread over availability zones.
    private final Map<String, Map<String, Map<Replica.ReplicaType, ReplicaSpread>>>
        availabilityZoneUsage = new HashMap<>();
    // Starts as the configured spreadAcrossDomains value; switched off during the computation
    // if some node lacks the spread-domain sysprop or fewer than two domains exist.
    private boolean doSpreadAcrossDomains;
  }
@Override
protected Map<Node, WeightedNode> getBaseWeightedNodes(
PlacementContext placementContext,
Set<Node> nodes,
Iterable<SolrCollection> relevantCollections,
boolean skipNodesWithErrors)
throws PlacementException {
// Fetch attributes for a superset of all nodes requested amongst the placementRequests
AttributeFetcher attributeFetcher = placementContext.getAttributeFetcher();
attributeFetcher
.requestNodeSystemProperty(AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP)
.requestNodeSystemProperty(AffinityPlacementConfig.NODE_TYPE_SYSPROP)
.requestNodeSystemProperty(AffinityPlacementConfig.REPLICA_TYPE_SYSPROP)
.requestNodeSystemProperty(AffinityPlacementConfig.SPREAD_DOMAIN_SYSPROP);
attributeFetcher
.requestNodeMetric(NodeMetricImpl.NUM_CORES)
.requestNodeMetric(NodeMetricImpl.FREE_DISK_GB);
Set<ReplicaMetric<?>> replicaMetrics = Set.of(ReplicaMetricImpl.INDEX_SIZE_GB);
Set<String> requestedCollections = new HashSet<>();
for (SolrCollection collection : relevantCollections) {
if (requestedCollections.add(collection.getName())) {
attributeFetcher.requestCollectionMetrics(collection, replicaMetrics);
}
}
attributeFetcher.fetchFrom(nodes);
final AttributeValues attrValues = attributeFetcher.fetchAttributes();
AffinityPlacementContext affinityPlacementContext = new AffinityPlacementContext();
affinityPlacementContext.doSpreadAcrossDomains = spreadAcrossDomains;
Map<Node, WeightedNode> affinityNodeMap = CollectionUtil.newHashMap(nodes.size());
for (Node node : nodes) {
AffinityNode affinityNode =
newNodeFromMetrics(node, attrValues, affinityPlacementContext, skipNodesWithErrors);
if (affinityNode != null) {
affinityNodeMap.put(node, affinityNode);
}
}
// If there are not multiple spreadDomains, then there is nothing to spread across
if (affinityPlacementContext.allSpreadDomains.size() < 2) {
affinityPlacementContext.doSpreadAcrossDomains = false;
}
return affinityNodeMap;
}
  /**
   * Builds an {@link AffinityNode} for the given node from the fetched attribute values, or
   * returns {@code null} when the node must be excluded ({@code skipNodesWithErrors} is set and
   * free-disk or core-count metrics are missing).
   *
   * <p>Side effects: registers the node's availability zone and spread domain in the shared
   * {@code affinityPlacementContext}, and disables domain spreading context-wide if this node
   * lacks the spread-domain sysprop.
   */
  AffinityNode newNodeFromMetrics(
      Node node,
      AttributeValues attrValues,
      AffinityPlacementContext affinityPlacementContext,
      boolean skipNodesWithErrors)
      throws PlacementException {
    // Parse the comma-separated replica types this node accepts; invalid entries are logged
    // and mapped to null (null never matches a real replica type).
    Set<Replica.ReplicaType> supportedReplicaTypes =
        attrValues.getSystemProperty(node, AffinityPlacementConfig.REPLICA_TYPE_SYSPROP).stream()
            .flatMap(s -> Arrays.stream(s.split(",")))
            .map(String::trim)
            .map(s -> s.toUpperCase(Locale.ROOT))
            .map(
                s -> {
                  try {
                    return Replica.ReplicaType.valueOf(s);
                  } catch (IllegalArgumentException e) {
                    log.warn(
                        "Node {} has an invalid value for the {} systemProperty: {}",
                        node.getName(),
                        AffinityPlacementConfig.REPLICA_TYPE_SYSPROP,
                        s);
                    return null;
                  }
                })
            .collect(Collectors.toSet());
    if (supportedReplicaTypes.isEmpty()) {
      // If property not defined or is only whitespace on a node, assuming node can take any
      // replica type
      supportedReplicaTypes = Set.of(Replica.ReplicaType.values());
    }
    // Node types this node advertises; empty set means the node has no declared type.
    Set<String> nodeType;
    Optional<String> nodePropOpt =
        attrValues.getSystemProperty(node, AffinityPlacementConfig.NODE_TYPE_SYSPROP);
    if (nodePropOpt.isEmpty()) {
      nodeType = Collections.emptySet();
    } else {
      nodeType = new HashSet<>(StrUtils.splitSmart(nodePropOpt.get(), ','));
    }
    Optional<Double> nodeFreeDiskGB = attrValues.getNodeMetric(node, NodeMetricImpl.FREE_DISK_GB);
    Optional<Integer> nodeNumCores = attrValues.getNodeMetric(node, NodeMetricImpl.NUM_CORES);
    // Missing AZ sysprop falls back to the shared "undefined" zone constant.
    String az =
        attrValues
            .getSystemProperty(node, AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP)
            .orElse(AffinityPlacementConfig.UNDEFINED_AVAILABILITY_ZONE);
    affinityPlacementContext.allAvailabilityZones.add(az);
    String spreadDomain;
    if (affinityPlacementContext.doSpreadAcrossDomains) {
      spreadDomain =
          attrValues
              .getSystemProperty(node, AffinityPlacementConfig.SPREAD_DOMAIN_SYSPROP)
              .orElse(null);
      if (spreadDomain == null) {
        if (log.isWarnEnabled()) {
          log.warn(
              "AffinityPlacementPlugin configured to spread across domains, but node {} does not have the {} system property. Ignoring spreadAcrossDomains.",
              node.getName(),
              AffinityPlacementConfig.SPREAD_DOMAIN_SYSPROP);
        }
        // In the context stop using spreadDomains, because we have a node without a spread
        // domain.
        affinityPlacementContext.doSpreadAcrossDomains = false;
        affinityPlacementContext.allSpreadDomains.clear();
      } else {
        affinityPlacementContext.allSpreadDomains.add(spreadDomain);
      }
    } else {
      spreadDomain = null;
    }
    // Exclusions below only apply when skipNodesWithErrors is requested; otherwise the node is
    // kept with 0 cores / 0 GB defaults.
    if (nodeFreeDiskGB.isEmpty() && skipNodesWithErrors) {
      if (log.isWarnEnabled()) {
        log.warn(
            "Unknown free disk on node {}, excluding it from placement decisions.",
            node.getName());
      }
      return null;
    } else if (nodeNumCores.isEmpty() && skipNodesWithErrors) {
      if (log.isWarnEnabled()) {
        log.warn(
            "Unknown number of cores on node {}, excluding it from placement decisions.",
            node.getName());
      }
      return null;
    } else {
      return new AffinityNode(
          node,
          attrValues,
          affinityPlacementContext,
          supportedReplicaTypes,
          nodeType,
          nodeNumCores.orElse(0),
          nodeFreeDiskGB.orElse(0D),
          az,
          spreadDomain);
    }
  }
  /**
   * This implementation weights nodes in order to achieve balancing across AvailabilityZones and
   * SpreadDomains, while trying to minimize the amount of replicas on a node and ensure a given
   * disk space per node. This implementation also supports limiting the placement of certain
   * replica types per node and co-locating collections.
   *
   * <p>The total weight of the AffinityNode is the sum of:
   *
   * <ul>
   *   <li>The number of replicas on the node
   *   <li>100 if the free disk space on the node < prioritizedFreeDiskGB, otherwise 0
   *   <li>If SpreadDomains are used:<br>
   *       10,000 * the sum over each collection/shard:
   *       <ul>
   *         <li>(# of replicas in this node's spread domain - the minimum spreadDomain's
   *             replicaCount)^2 <br>
   *             <i>These are individually squared to penalize higher values when summing up all
   *             values</i>
   *       </ul>
   *   <li>If AvailabilityZones are used:<br>
   *       1,000,000 * the sum over each collection/shard/replicaType:
   *       <ul>
   *         <li>(# of replicas in this node's AZ - the minimum AZ's replicaCount)^2 <br>
   *             <i>These are individually squared to penalize higher values when summing up all
   *             values</i>
   *       </ul>
   * </ul>
   *
   * The weighting here ensures that the order of importance for nodes is:
   *
   * <ol>
   *   <li>Spread replicas of the same shard/replicaType across availabilityZones
   *   <li>Spread replicas of the same shard across spreadDomains
   *   <li>Make sure that replicas are not placed on nodes that have < prioritizedFreeDiskGB disk
   *       space available
   *   <li>Minimize the amount of replicas on the node
   * </ol>
   *
   * <p>The "relevant" weight with a replica is the sum of:
   *
   * <ul>
   *   <li>The number of replicas on the node
   *   <li>100 if the projected free disk space on the node < prioritizedFreeDiskGB, otherwise 0
   *   <li>If SpreadDomains are used:<br>
   *       10,000 * ( # of replicas for the replica's shard in this node's spread domain - the
   *       minimum spreadDomain's replicaCount )
   *   <li>If AvailabilityZones are used:<br>
   *       1,000,000 * ( # of replicas for the replica's shard & replicaType in this node's AZ -
   *       the minimum AZ's replicaCount )
   * </ul>
   *
   * <p>Multiple replicas of the same shard are not permitted to live on the same Node.
   *
   * <p>Users can specify withCollection, to ensure that co-placement of replicas is ensured when
   * computing new replica placements or replica balancing.
   */
  private class AffinityNode extends WeightedNode {
    // Fetched attribute values, used to look up projected replica index sizes.
    private final AttributeValues attrValues;
    // Shared state across all AffinityNodes of one computation (AZ / spread-domain usage).
    private final AffinityPlacementContext affinityPlacementContext;
    // Replica types this node accepts (from the replica_type sysprop, or all types).
    private final Set<Replica.ReplicaType> supportedReplicaTypes;
    // Node types this node advertises (from the node_type sysprop).
    private final Set<String> nodeType;
    // Mutable: adjusted as replica placements are projected onto / removed from this node.
    private int coresOnNode;
    // Mutable: adjusted by the projected index size of replicas added / removed.
    private double nodeFreeDiskGB;
    private final String availabilityZone;
    private final String spreadDomain;
    AffinityNode(
        Node node,
        AttributeValues attrValues,
        AffinityPlacementContext affinityPlacementContext,
        Set<Replica.ReplicaType> supportedReplicaTypes,
        Set<String> nodeType,
        int coresOnNode,
        double nodeFreeDiskGB,
        String az,
        String spreadDomain) {
      super(node);
      this.attrValues = attrValues;
      this.affinityPlacementContext = affinityPlacementContext;
      this.supportedReplicaTypes = supportedReplicaTypes;
      this.nodeType = nodeType;
      this.coresOnNode = coresOnNode;
      this.nodeFreeDiskGB = nodeFreeDiskGB;
      this.availabilityZone = az;
      this.spreadDomain = spreadDomain;
    }
    // See the class Javadoc for the meaning of each weight tier (1 / 100 / 10,000 / 1,000,000).
    @Override
    public int calcWeight() {
      return coresOnNode
          // Only add 100 if prioritizedFreeDiskGB was provided and the node's freeDisk is lower
          // than it
          + 100 * (prioritizedFreeDiskGB > 0 && nodeFreeDiskGB < prioritizedFreeDiskGB ? 1 : 0)
          + 10000 * getSpreadDomainWeight()
          + 1000000 * getAZWeight();
    }
    @Override
    public int calcRelevantWeightWithReplica(Replica replica) {
      return coresOnNode
          // Only add 100 if prioritizedFreeDiskGB was provided and the node's projected freeDisk
          // is lower than it
          + 100
              * (prioritizedFreeDiskGB > 0
                      && nodeFreeDiskGB - getProjectedSizeOfReplica(replica)
                          < prioritizedFreeDiskGB
                  ? 1
                  : 0)
          + 10000 * projectReplicaSpreadWeight(replica)
          + 1000000 * projectAZWeight(replica);
    }
    @Override
    public boolean canAddReplica(Replica replica) {
      String collection = replica.getShard().getCollection().getName();
      // By default, do not allow two replicas of the same shard on a node
      return super.canAddReplica(replica)
          // Filter out unsupported replica types
          && supportedReplicaTypes.contains(replica.getType())
          // Filter out unsupported node types
          && Optional.ofNullable(nodeTypes.get(collection))
              .map(s -> s.stream().anyMatch(nodeType::contains))
              .orElse(true)
          // Ensure any co-located collections already exist on the Node
          && Optional.ofNullable(withCollections.get(collection))
              .map(this::hasCollectionOnNode)
              .orElse(true)
          // Ensure same shard is collocated if required
          && Optional.ofNullable(withCollectionShards.get(collection))
              .map(
                  shardWiseOf ->
                      getShardsOnNode(shardWiseOf).contains(replica.getShard().getShardName()))
              .orElse(true)
          // Ensure the disk space will not go below the minimum if the replica is added
          && (minimalFreeDiskGB <= 0
              || nodeFreeDiskGB - getProjectedSizeOfReplica(replica) > minimalFreeDiskGB);
    }
    /**
     * Return any replicas that cannot be removed because there are collocated collections that
     * require the replica to exist.
     *
     * @param replicas the replicas to remove
     * @return any errors for replicas that cannot be removed
     */
    @Override
    public Map<Replica, String> canRemoveReplicas(Collection<Replica> replicas) {
      Map<Replica, String> replicaRemovalExceptions = new HashMap<>();
      // collection -> shard -> replicas of that shard already slated for removal, so a batch of
      // removals is vetoed as soon as it would take the last replica of a required shard away.
      Map<String, Map<String, Set<Replica>>> removals = new HashMap<>();
      for (Replica replica : replicas) {
        SolrCollection collection = replica.getShard().getCollection();
        // Primary collections collocated with this one that are present on this node.
        Set<String> collocatedCollections = new HashSet<>();
        Optional.ofNullable(collocatedWith.get(collection.getName()))
            .ifPresent(collocatedCollections::addAll);
        collocatedCollections.retainAll(getCollectionsOnNode());
        if (collocatedCollections.isEmpty()) {
          continue;
        }
        // Shard-wise collocations require specific shards; shard-agnostic ones require any.
        Stream<String> shardWiseCollocations =
            collocatedCollections.stream()
                .filter(
                    priColl -> collection.getName().equals(withCollectionShards.get(priColl)));
        final Set<String> mandatoryShardsOrAll =
            shardWiseCollocations
                .flatMap(priColl -> getShardsOnNode(priColl).stream())
                .collect(Collectors.toSet());
        // There are collocatedCollections for this shard, so make sure there is a replica of this
        // shard left on the node after it is removed
        Set<Replica> replicasRemovedForShard =
            removals
                .computeIfAbsent(
                    replica.getShard().getCollection().getName(), k -> new HashMap<>())
                .computeIfAbsent(replica.getShard().getShardName(), k -> new HashSet<>());
        replicasRemovedForShard.add(replica);
        // either if all shards are mandatory, or the current one is mandatory
        boolean shardWise = false;
        if (mandatoryShardsOrAll.isEmpty()
            || (shardWise = mandatoryShardsOrAll.contains(replica.getShard().getShardName()))) {
          if (replicasRemovedForShard.size()
              >= getReplicasForShardOnNode(replica.getShard()).size()) {
            replicaRemovalExceptions.put(
                replica,
                "co-located with replicas of "
                    + (shardWise ? replica.getShard().getShardName() + " of " : "")
                    + collocatedCollections);
          }
        }
      }
      return replicaRemovalExceptions;
    }
    // Project a replica onto this node: charge its disk size and a core, then update the
    // shared AZ/spread-domain usage. Returns true when other nodes may need re-sorting.
    @Override
    protected boolean addProjectedReplicaWeights(Replica replica) {
      nodeFreeDiskGB -= getProjectedSizeOfReplica(replica);
      coresOnNode += 1;
      return addReplicaToAzAndSpread(replica);
    }
    // Register a pre-existing replica in the shared usage maps without charging disk/cores
    // (those are already reflected in the fetched node metrics).
    @Override
    protected void initReplicaWeights(Replica replica) {
      addReplicaToAzAndSpread(replica);
    }
    private boolean addReplicaToAzAndSpread(Replica replica) {
      boolean needsResort = false;
      // Only use AvailabilityZones if there are more than 1
      if (affinityPlacementContext.allAvailabilityZones.size() > 1) {
        needsResort |=
            affinityPlacementContext
                .availabilityZoneUsage
                .computeIfAbsent(
                    replica.getShard().getCollection().getName(), k -> new HashMap<>())
                .computeIfAbsent(replica.getShard().getShardName(), k -> new HashMap<>())
                .computeIfAbsent(
                    replica.getType(),
                    k -> new ReplicaSpread(affinityPlacementContext.allAvailabilityZones))
                .addReplica(availabilityZone);
      }
      // Only use SpreadDomains if they have been provided to all nodes and there are more than 1
      if (affinityPlacementContext.doSpreadAcrossDomains) {
        needsResort |=
            affinityPlacementContext
                .spreadDomainUsage
                .computeIfAbsent(
                    replica.getShard().getCollection().getName(), k -> new HashMap<>())
                .computeIfAbsent(
                    replica.getShard().getShardName(),
                    k -> new ReplicaSpread(affinityPlacementContext.allSpreadDomains))
                .addReplica(spreadDomain);
      }
      return needsResort;
    }
    // Exact inverse of addProjectedReplicaWeights: refund disk and core, then unregister the
    // replica from the shared AZ/spread-domain usage.
    @Override
    protected void removeProjectedReplicaWeights(Replica replica) {
      nodeFreeDiskGB += getProjectedSizeOfReplica(replica);
      coresOnNode -= 1;
      // Only use AvailabilityZones if there are more than 1
      if (affinityPlacementContext.allAvailabilityZones.size() > 1) {
        Optional.ofNullable(
                affinityPlacementContext.availabilityZoneUsage.get(
                    replica.getShard().getCollection().getName()))
            .map(m -> m.get(replica.getShard().getShardName()))
            .map(m -> m.get(replica.getType()))
            .ifPresent(m -> m.removeReplica(availabilityZone));
      }
      // Only use SpreadDomains if they have been provided to all nodes and there are more than 1
      if (affinityPlacementContext.doSpreadAcrossDomains) {
        Optional.ofNullable(
                affinityPlacementContext.spreadDomainUsage.get(
                    replica.getShard().getCollection().getName()))
            .map(m -> m.get(replica.getShard().getShardName()))
            .ifPresent(m -> m.removeReplica(spreadDomain));
      }
    }
    // Uses the shard leader's index size as the projected size of any replica of that shard;
    // falls back to 0 GB when no metric is available.
    private double getProjectedSizeOfReplica(Replica replica) {
      return attrValues
          .getCollectionMetrics(replica.getShard().getCollection().getName())
          .flatMap(colMetrics -> colMetrics.getShardMetrics(replica.getShard().getShardName()))
          .flatMap(ShardMetrics::getLeaderMetrics)
          .flatMap(lrm -> lrm.getReplicaMetric(ReplicaMetricImpl.INDEX_SIZE_GB))
          .orElse(0D);
    }
    /**
     * If there are more than one spreadDomains given in the cluster, then return a weight for
     * this node, given the number of replicas in its spreadDomain.
     *
     * <p>For each Collection & Shard, sum up the number of replicas this node's SpreadDomain has
     * over the minimum SpreadDomain. Square each value before summing, to ensure that smaller
     * number of higher values are penalized more than a larger number of smaller values.
     *
     * @return the weight
     */
    private int getSpreadDomainWeight() {
      if (affinityPlacementContext.doSpreadAcrossDomains) {
        return affinityPlacementContext.spreadDomainUsage.values().stream()
            .flatMap(m -> m.values().stream())
            .mapToInt(rs -> rs.overMinimum(spreadDomain))
            .map(i -> i * i)
            .sum();
      } else {
        return 0;
      }
    }
    /**
     * If there are more than one SpreadDomains given in the cluster, then return a projected
     * SpreadDomain weight for this node and this replica.
     *
     * <p>For the new replica's Collection & Shard, project the number of replicas this node's
     * SpreadDomain has over the minimum SpreadDomain.
     *
     * @return the weight
     */
    private int projectReplicaSpreadWeight(Replica replica) {
      if (replica != null && affinityPlacementContext.doSpreadAcrossDomains) {
        return Optional.ofNullable(
                affinityPlacementContext.spreadDomainUsage.get(
                    replica.getShard().getCollection().getName()))
            .map(m -> m.get(replica.getShard().getShardName()))
            .map(rs -> rs.projectOverMinimum(spreadDomain, 1))
            .orElse(0);
      } else {
        return 0;
      }
    }
    /**
     * If there are more than one AvailabilityZones given in the cluster, then return a weight for
     * this node, given the number of replicas in its availabilityZone.
     *
     * <p>For each Collection, Shard & ReplicaType, sum up the number of replicas this node's
     * AvailabilityZone has over the minimum AvailabilityZone. Square each value before summing,
     * to ensure that smaller number of higher values are penalized more than a larger number of
     * smaller values.
     *
     * @return the weight
     */
    private int getAZWeight() {
      if (affinityPlacementContext.allAvailabilityZones.size() < 2) {
        return 0;
      } else {
        return affinityPlacementContext.availabilityZoneUsage.values().stream()
            .flatMap(m -> m.values().stream())
            .flatMap(m -> m.values().stream())
            .mapToInt(rs -> rs.overMinimum(availabilityZone))
            .map(i -> i * i)
            .sum();
      }
    }
    /**
     * If there are more than one AvailabilityZones given in the cluster, then return a projected
     * AvailabilityZone weight for this node and this replica.
     *
     * <p>For the new replica's Collection, Shard & ReplicaType, project the number of replicas
     * this node's AvailabilityZone has over the minimum AvailabilityZone.
     *
     * @return the weight
     */
    private int projectAZWeight(Replica replica) {
      if (replica == null || affinityPlacementContext.allAvailabilityZones.size() < 2) {
        return 0;
      } else {
        return Optional.ofNullable(
                affinityPlacementContext.availabilityZoneUsage.get(
                    replica.getShard().getCollection().getName()))
            .map(m -> m.get(replica.getShard().getShardName()))
            .map(m -> m.get(replica.getType()))
            .map(rs -> rs.projectOverMinimum(availabilityZone, 1))
            .orElse(0);
      }
    }
  }
  /**
   * Tracks how many replicas of one shard (or shard/replicaType) live under each spread key
   * (an availability zone or a spread domain), and maintains the minimum count across keys so
   * a node's "over the minimum" penalty can be computed cheaply.
   */
  private static class ReplicaSpread {
    // All keys (zones/domains) that exist in the cluster, shared with the placement context.
    private final Set<String> allKeys;
    // key -> replica count; entries are removed when their count drops to 0 (see removeReplica).
    private final Map<String, Integer> spread;
    // Smallest replica count across keys; 0 while some key has no replicas at all.
    private int minReplicasLocated;
    private ReplicaSpread(Set<String> allKeys) {
      this.allKeys = allKeys;
      this.spread = new HashMap<>();
      this.minReplicasLocated = 0;
    }
    int overMinimum(String key) {
      return spread.getOrDefault(key, 0) - minReplicasLocated;
    }
    /**
     * Trying adding a replica for the given spread key, and return the {@link
     * #overMinimum(String)} with it added. Remove the replica, so that the state is unchanged
     * from when the method was called.
     */
    int projectOverMinimum(String key, int replicaDelta) {
      int overMinimum = overMinimum(key);
      if (overMinimum == 0 && replicaDelta > 0) {
        // Adding to a key currently at the minimum may raise the minimum itself, so do a real
        // add/remove round-trip to get the exact projected value.
        addReplica(key);
        int projected = overMinimum(key);
        removeReplica(key);
        return projected;
      } else {
        // Otherwise the minimum cannot change, so the projection is pure arithmetic.
        return Integer.max(0, overMinimum + replicaDelta);
      }
    }
    /**
     * Add a replica for the given spread key, returning whether a full resorting is needed for
     * AffinityNodes. Resorting is only needed if other nodes could possibly have a lower weight
     * than before.
     *
     * @param key the spread key for the replica that should be added
     * @return whether a re-sort is required
     */
    boolean addReplica(String key) {
      int previous = spread.getOrDefault(key, 0);
      spread.put(key, previous + 1);
      // The minimum can only rise once every key has at least one replica, and only when the
      // key we just incremented was sitting at the minimum.
      if (allKeys.size() > 0
          && spread.size() == allKeys.size()
          && previous == minReplicasLocated) {
        minReplicasLocated = spread.values().stream().mapToInt(Integer::intValue).min().orElse(0);
        return true;
      }
      return false;
    }
    void removeReplica(String key) {
      // computeIfPresent maps a count reaching 0 to null, which removes the entry entirely.
      Integer replicasLocated = spread.computeIfPresent(key, (k, v) -> v - 1 == 0 ? null : v - 1);
      if (replicasLocated == null) {
        replicasLocated = 0;
      }
      // The minimum only ever decreases here; it is recomputed upward in addReplica.
      if (replicasLocated < minReplicasLocated) {
        minReplicasLocated = replicasLocated;
      }
    }
  }
}
}
|
googleapis/google-cloud-java | 35,274 | java-recaptchaenterprise/proto-google-cloud-recaptchaenterprise-v1/src/main/java/com/google/recaptchaenterprise/v1/UpdateFirewallPolicyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto
// Protobuf Java Version: 3.25.8
package com.google.recaptchaenterprise.v1;
/**
*
*
* <pre>
* The update firewall policy request message.
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.UpdateFirewallPolicyRequest}
*/
public final class UpdateFirewallPolicyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.UpdateFirewallPolicyRequest)
UpdateFirewallPolicyRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateFirewallPolicyRequest.newBuilder() to construct.
  private UpdateFirewallPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor; fields keep their defaults (both message fields unset).
  private UpdateFirewallPolicyRequest() {}
  // Protobuf runtime hook for creating fresh instances; not meant to be called directly.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateFirewallPolicyRequest();
  }
  // Descriptor/reflection plumbing generated from recaptchaenterprise.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_UpdateFirewallPolicyRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_UpdateFirewallPolicyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.class,
            com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.Builder.class);
  }
  // Presence bits for the optional message fields: bit 0 = firewall_policy, bit 1 = update_mask.
  private int bitField0_;
  public static final int FIREWALL_POLICY_FIELD_NUMBER = 1;
  private com.google.recaptchaenterprise.v1.FirewallPolicy firewallPolicy_;
  /**
   *
   *
   * <pre>
   * Required. The policy to update.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the firewallPolicy field is set.
   */
  @java.lang.Override
  public boolean hasFirewallPolicy() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The policy to update.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The firewallPolicy. Never null: returns the default instance when the field is unset.
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.FirewallPolicy getFirewallPolicy() {
    return firewallPolicy_ == null
        ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
        : firewallPolicy_;
  }
  /**
   *
   *
   * <pre>
   * Required. The policy to update.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder getFirewallPolicyOrBuilder() {
    return firewallPolicy_ == null
        ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
        : firewallPolicy_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Optional. The mask to control which fields of the policy get updated. If
   * the mask is not present, all fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. The mask to control which fields of the policy get updated. If
   * the mask is not present, all fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask. Never null: returns the default instance when the field is unset.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The mask to control which fields of the policy get updated. If
   * the mask is not present, all fields are updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized initialization check: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no proto2-required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bit is set; wire tags 1 and 2.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getFirewallPolicy());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size; mirrors writeTo's field/presence logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getFirewallPolicy());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: same presence flags, equal field values, equal unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest)) {
      return super.equals(obj);
    }
    com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest other =
        (com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest) obj;
    if (hasFirewallPolicy() != other.hasFirewallPolicy()) return false;
    if (hasFirewallPolicy()) {
      if (!getFirewallPolicy().equals(other.getFirewallPolicy())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (messages are immutable) and mixes in each set field's number and value.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasFirewallPolicy()) {
      hash = (37 * hash) + FIREWALL_POLICY_FIELD_NUMBER;
      hash = (53 * hash) + getFirewallPolicy().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER / the GeneratedMessageV3
  // IO helpers, differing only in input source and whether an extension registry is supplied.
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: create an empty builder, or one pre-populated from a prototype
  // or from this instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a pointless mergeFrom when called on the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  // NOTE(review): protoc-generated builder class — regenerate from the .proto
  // instead of hand-editing; any manual change is lost on the next codegen run.
  /**
   *
   *
   * <pre>
   * The update firewall policy request message.
   * </pre>
   *
   * Protobuf type {@code google.cloud.recaptchaenterprise.v1.UpdateFirewallPolicyRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.UpdateFirewallPolicyRequest)
      com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_UpdateFirewallPolicyRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_UpdateFirewallPolicyRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.class,
              com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.Builder.class);
    }

    // Construct using com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime requires it
    // (e.g. when builders must propagate change notifications to a parent).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getFirewallPolicyFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      firewallPolicy_ = null;
      if (firewallPolicyBuilder_ != null) {
        firewallPolicyBuilder_.dispose();
        firewallPolicyBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_UpdateFirewallPolicyRequest_descriptor;
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest
        getDefaultInstanceForType() {
      return com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest build() {
      com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest buildPartial() {
      com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest result =
          new com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set into the result message.
    private void buildPartial0(
        com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.firewallPolicy_ =
            firewallPolicyBuilder_ == null ? firewallPolicy_ : firewallPolicyBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest) {
        return mergeFrom((com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest other) {
      if (other
          == com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest.getDefaultInstance())
        return this;
      if (other.hasFirewallPolicy()) {
        mergeFirewallPolicy(other.getFirewallPolicy());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getFirewallPolicyFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: bit 0 = firewall_policy, bit 1 = update_mask.
    private int bitField0_;

    private com.google.recaptchaenterprise.v1.FirewallPolicy firewallPolicy_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.recaptchaenterprise.v1.FirewallPolicy,
            com.google.recaptchaenterprise.v1.FirewallPolicy.Builder,
            com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder>
        firewallPolicyBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the firewallPolicy field is set.
     */
    public boolean hasFirewallPolicy() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The firewallPolicy.
     */
    public com.google.recaptchaenterprise.v1.FirewallPolicy getFirewallPolicy() {
      if (firewallPolicyBuilder_ == null) {
        return firewallPolicy_ == null
            ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
            : firewallPolicy_;
      } else {
        return firewallPolicyBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setFirewallPolicy(com.google.recaptchaenterprise.v1.FirewallPolicy value) {
      if (firewallPolicyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        firewallPolicy_ = value;
      } else {
        firewallPolicyBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setFirewallPolicy(
        com.google.recaptchaenterprise.v1.FirewallPolicy.Builder builderForValue) {
      if (firewallPolicyBuilder_ == null) {
        firewallPolicy_ = builderForValue.build();
      } else {
        firewallPolicyBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeFirewallPolicy(com.google.recaptchaenterprise.v1.FirewallPolicy value) {
      if (firewallPolicyBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && firewallPolicy_ != null
            && firewallPolicy_
                != com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()) {
          getFirewallPolicyBuilder().mergeFrom(value);
        } else {
          firewallPolicy_ = value;
        }
      } else {
        firewallPolicyBuilder_.mergeFrom(value);
      }
      if (firewallPolicy_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearFirewallPolicy() {
      bitField0_ = (bitField0_ & ~0x00000001);
      firewallPolicy_ = null;
      if (firewallPolicyBuilder_ != null) {
        firewallPolicyBuilder_.dispose();
        firewallPolicyBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.recaptchaenterprise.v1.FirewallPolicy.Builder getFirewallPolicyBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getFirewallPolicyFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder getFirewallPolicyOrBuilder() {
      if (firewallPolicyBuilder_ != null) {
        return firewallPolicyBuilder_.getMessageOrBuilder();
      } else {
        return firewallPolicy_ == null
            ? com.google.recaptchaenterprise.v1.FirewallPolicy.getDefaultInstance()
            : firewallPolicy_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The policy to update.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.FirewallPolicy firewall_policy = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.recaptchaenterprise.v1.FirewallPolicy,
            com.google.recaptchaenterprise.v1.FirewallPolicy.Builder,
            com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder>
        getFirewallPolicyFieldBuilder() {
      if (firewallPolicyBuilder_ == null) {
        firewallPolicyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.recaptchaenterprise.v1.FirewallPolicy,
                com.google.recaptchaenterprise.v1.FirewallPolicy.Builder,
                com.google.recaptchaenterprise.v1.FirewallPolicyOrBuilder>(
                getFirewallPolicy(), getParentForChildren(), isClean());
        firewallPolicy_ = null;
      }
      return firewallPolicyBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The mask to control which fields of the policy get updated. If
     * the mask is not present, all fields are updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.UpdateFirewallPolicyRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.UpdateFirewallPolicyRequest)
  // NOTE(review): protoc-generated singleton/parser plumbing — do not hand-edit.
  private static final com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest();
  }

  public static com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to the builder's mergeFrom, preserving partially-read
  // state on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<UpdateFirewallPolicyRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateFirewallPolicyRequest>() {
        @java.lang.Override
        public UpdateFirewallPolicyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateFirewallPolicyRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateFirewallPolicyRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.recaptchaenterprise.v1.UpdateFirewallPolicyRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hudi | 35,599 | hudi-common/src/main/java/org/apache/hudi/common/table/timeline/versioning/v1/ActiveTimelineV1.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.common.table.timeline.versioning.v1;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
import org.apache.hudi.avro.model.HoodieCleanerPlan;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.avro.model.HoodieIndexPlan;
import org.apache.hudi.avro.model.HoodieRequestedReplaceMetadata;
import org.apache.hudi.avro.model.HoodieRestorePlan;
import org.apache.hudi.avro.model.HoodieRollbackMetadata;
import org.apache.hudi.avro.model.HoodieRollbackPlan;
import org.apache.hudi.common.model.HoodieCommitMetadata;
import org.apache.hudi.common.model.HoodieReplaceCommitMetadata;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieInstantReader;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.table.timeline.InstantFileNameGenerator;
import org.apache.hudi.common.table.timeline.TableFormatCompletionAction;
import org.apache.hudi.common.table.timeline.TimelineUtils;
import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.storage.HoodieInstantWriter;
import org.apache.hudi.storage.HoodieStorage;
import org.apache.hudi.storage.StoragePath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;
import static org.apache.hudi.common.table.timeline.TimelineUtils.getHoodieInstantWriterOption;
/**
 * V1 (legacy-layout) implementation of the Hudi active timeline: reads instant files
 * from the table's meta path and exposes create/transition/delete operations on them.
 */
public class ActiveTimelineV1 extends BaseTimelineV1 implements HoodieActiveTimeline {

  // Every instant-file extension that may legally appear in an active V1 timeline;
  // anything else in the meta path is ignored when the timeline is loaded.
  public static final Set<String> VALID_EXTENSIONS_IN_ACTIVE_TIMELINE = new HashSet<>(Arrays.asList(
      COMMIT_EXTENSION, INFLIGHT_COMMIT_EXTENSION, REQUESTED_COMMIT_EXTENSION,
      DELTA_COMMIT_EXTENSION, INFLIGHT_DELTA_COMMIT_EXTENSION, REQUESTED_DELTA_COMMIT_EXTENSION,
      SAVEPOINT_EXTENSION, INFLIGHT_SAVEPOINT_EXTENSION,
      CLEAN_EXTENSION, REQUESTED_CLEAN_EXTENSION, INFLIGHT_CLEAN_EXTENSION,
      INFLIGHT_COMPACTION_EXTENSION, REQUESTED_COMPACTION_EXTENSION,
      REQUESTED_RESTORE_EXTENSION, INFLIGHT_RESTORE_EXTENSION, RESTORE_EXTENSION,
      INFLIGHT_LOG_COMPACTION_EXTENSION, REQUESTED_LOG_COMPACTION_EXTENSION,
      ROLLBACK_EXTENSION, REQUESTED_ROLLBACK_EXTENSION, INFLIGHT_ROLLBACK_EXTENSION,
      REQUESTED_REPLACE_COMMIT_EXTENSION, INFLIGHT_REPLACE_COMMIT_EXTENSION, REPLACE_COMMIT_EXTENSION,
      REQUESTED_INDEX_COMMIT_EXTENSION, INFLIGHT_INDEX_COMMIT_EXTENSION, INDEX_COMMIT_EXTENSION,
      REQUESTED_SAVE_SCHEMA_ACTION_EXTENSION, INFLIGHT_SAVE_SCHEMA_ACTION_EXTENSION, SAVE_SCHEMA_ACTION_EXTENSION));

  private static final Logger LOG = LoggerFactory.getLogger(ActiveTimelineV1.class);
  // Handle to the table whose timeline this object manages; set in the constructor.
  protected HoodieTableMetaClient metaClient;
  // Maps instants to their V1 on-disk file names.
  private final InstantFileNameGenerator instantFileNameGenerator = new InstantFileNameGeneratorV1();
protected ActiveTimelineV1(HoodieTableMetaClient metaClient, Set<String> includedExtensions,
boolean applyLayoutFilters) {
// Filter all the filter in the metapath and include only the extensions passed and
// convert them into HoodieInstant
this.setInstants(getInstantsFromFileSystem(metaClient, includedExtensions, applyLayoutFilters));
this.metaClient = metaClient;
// multiple casts will make this lambda serializable -
// http://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.16
LOG.debug("Loaded instants upto : " + lastInstant());
}
  /** Loads the full active timeline (all valid extensions) with layout filters applied. */
  public ActiveTimelineV1(HoodieTableMetaClient metaClient) {
    this(metaClient, Collections.unmodifiableSet(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE), true);
  }

  /** Loads the full active timeline, optionally skipping layout filtering. */
  public ActiveTimelineV1(HoodieTableMetaClient metaClient, boolean applyLayoutFilter) {
    this(metaClient, Collections.unmodifiableSet(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE), applyLayoutFilter);
  }

  /**
   * For serialization and de-serialization only.
   *
   * @deprecated
   */
  @Deprecated
  public ActiveTimelineV1() {
  }

  /**
   * This method is only used when this object is deserialized in a spark executor.
   *
   * @deprecated
   */
  @Deprecated
  private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
    // Default field restoration only; metaClient must be re-attached by the caller
    // after deserialization — NOTE(review): assumed from the no-op body, confirm with callers.
    in.defaultReadObject();
  }
@Override
public Set<String> getValidExtensionsInActiveTimeline() {
return Collections.unmodifiableSet(VALID_EXTENSIONS_IN_ACTIVE_TIMELINE);
}
@Override
public void createCompleteInstant(HoodieInstant instant) {
LOG.info("Creating a new complete instant {}", instant);
createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.empty(), false);
}
@Override
public void createNewInstant(HoodieInstant instant) {
LOG.info("Creating a new instant {}", instant);
// Create the in-flight file
createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.empty(), false);
}
@Override
public HoodieInstant createRequestedCommitWithReplaceMetadata(String instantTime, String actionType) {
HoodieInstant instant = instantGenerator.createNewInstant(HoodieInstant.State.REQUESTED, actionType, instantTime);
LOG.info("Creating a new instant {}", instant);
// Create the request replace file
createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(new HoodieRequestedReplaceMetadata()), false);
return instant;
}
@Override
public <T> HoodieInstant saveAsComplete(HoodieInstant instant, Option<T> metadata) {
LOG.info("Marking instant complete " + instant);
ValidationUtils.checkArgument(instant.isInflight(),
"Could not mark an already completed instant as complete again " + instant);
HoodieInstant completedInstant = instantGenerator.createNewInstant(HoodieInstant.State.COMPLETED, instant.getAction(), instant.requestedTime());
transitionState(instant, completedInstant, metadata);
LOG.info("Completed {}", instant);
return completedInstant;
}
@Override
public <T> HoodieInstant saveAsComplete(boolean shouldLock, HoodieInstant instant, Option<T> metadata) {
return saveAsComplete(instant, metadata);
}
@Override
public <T> HoodieInstant saveAsComplete(boolean shouldLock, HoodieInstant instant, Option<T> metadata, Option<String> completionTimeOpt) {
return saveAsComplete(instant, metadata);
}
@Override
public <T> HoodieInstant saveAsComplete(boolean shouldLock, HoodieInstant instant, Option<T> metadata, TableFormatCompletionAction tableFormatCompletionAction) {
HoodieInstant completedInstant = saveAsComplete(shouldLock, instant, metadata);
tableFormatCompletionAction.execute(completedInstant);
return completedInstant;
}
@Override
public HoodieInstant revertToInflight(HoodieInstant instant) {
LOG.info("Reverting instant to inflight {}", instant);
HoodieInstant inflight = TimelineUtils.getInflightInstant(instant, metaClient);
revertCompleteToInflight(instant, inflight);
LOG.info("Reverted {} to inflight {}", instant, inflight);
return inflight;
}
  /** Deletes an instant that must still be in-flight. */
  @Override
  public void deleteInflight(HoodieInstant instant) {
    ValidationUtils.checkArgument(instant.isInflight());
    deleteInstantFile(instant);
  }

  /** Deletes any non-completed (requested or in-flight) instant. */
  @Override
  public void deletePending(HoodieInstant instant) {
    ValidationUtils.checkArgument(!instant.isCompleted());
    deleteInstantFile(instant);
  }

  /** Deletes a rollback instant that has already completed. */
  @Override
  public void deleteCompletedRollback(HoodieInstant instant) {
    ValidationUtils.checkArgument(instant.isCompleted());
    deleteInstantFile(instant);
  }

  /** Deletes the instant file only if its content is empty; no-op if the file is absent. */
  @Override
  public void deleteEmptyInstantIfExists(HoodieInstant instant) {
    ValidationUtils.checkArgument(isEmpty(instant));
    deleteInstantFileIfExists(instant);
  }

  /** Deletes a compaction instant that is still in REQUESTED state. */
  @Override
  public void deleteCompactionRequested(HoodieInstant instant) {
    ValidationUtils.checkArgument(instant.isRequested());
    ValidationUtils.checkArgument(Objects.equals(instant.getAction(), HoodieTimeline.COMPACTION_ACTION));
    deleteInstantFile(instant);
  }
  /**
   * Deletes the instant's timeline file if it exists; a missing file is logged and ignored,
   * but a failed delete of an existing file raises {@link HoodieIOException}.
   */
  @Override
  public void deleteInstantFileIfExists(HoodieInstant instant) {
    LOG.info("Deleting instant {}", instant);
    StoragePath commitFilePath = getInstantFileNamePath(instantFileNameGenerator.getFileName(instant));
    try {
      // NOTE(review): exists+delete is a check-then-act race; a concurrent deletion
      // between the two calls would surface as a delete failure — confirm callers tolerate this.
      if (metaClient.getStorage().exists(commitFilePath)) {
        boolean result = metaClient.getStorage().deleteFile(commitFilePath);
        if (result) {
          LOG.info("Removed instant {}", instant);
        } else {
          throw new HoodieIOException("Could not delete instant " + instant + " with path " + commitFilePath);
        }
      } else {
        LOG.info("The commit {} to remove does not exist", commitFilePath);
      }
    } catch (IOException e) {
      throw new HoodieIOException("Could not remove commit " + commitFilePath, e);
    }
  }
private void deleteInstantFile(HoodieInstant instant) {
LOG.info("Deleting instant {}", instant);
StoragePath inFlightCommitFilePath = getInstantFileNamePath(instantFileNameGenerator.getFileName(instant));
try {
boolean result = metaClient.getStorage().deleteFile(inFlightCommitFilePath);
if (result) {
LOG.info("Removed instant {}", instant);
} else {
throw new HoodieIOException("Could not delete instant " + instant + " with path " + inFlightCommitFilePath);
}
} catch (IOException e) {
throw new HoodieIOException("Could not remove inflight commit " + inFlightCommitFilePath, e);
}
}
@Override
public Option<byte[]> getInstantDetails(HoodieInstant instant) {
StoragePath detailPath = getInstantFileNamePath(instantFileNameGenerator.getFileName(instant));
return readDataFromPath(detailPath);
}
protected Option<byte[]> readDataFromPath(StoragePath filePath) {
try (InputStream inputStream = readDataStreamFromPath(filePath)) {
return Option.of(FileIOUtils.readAsByteArray(inputStream));
} catch (IOException ex) {
throw new HoodieIOException("Could not read commit details from " + filePath, ex);
}
}
@Override
public InputStream getContentStream(HoodieInstant instant) {
StoragePath filePath = getInstantFileNamePath(instantFileNameGenerator.getFileName(instant));
return readDataStreamFromPath(filePath);
}
@Override
public HoodieInstantReader getInstantReader() {
return this;
}
@Override
public Option<Pair<HoodieInstant, HoodieCommitMetadata>> getLastCommitMetadataWithValidSchema() {
return Option.fromJavaOptional(
getCommitMetadataStream()
.filter(instantCommitMetadataPair ->
WriteOperationType.canUpdateSchema(instantCommitMetadataPair.getRight().getOperationType())
&& !StringUtils.isNullOrEmpty(instantCommitMetadataPair.getValue().getMetadata(HoodieCommitMetadata.SCHEMA_KEY)))
.findFirst()
);
}
@Override
public Option<Pair<HoodieInstant, HoodieCommitMetadata>> getLastCommitMetadataWithValidData() {
return Option.fromJavaOptional(
getCommitMetadataStream()
.filter(instantCommitMetadataPair ->
!instantCommitMetadataPair.getValue().getFileIdAndRelativePaths().isEmpty())
.findFirst()
);
}
  /**
   * Lazily streams completed commits newest-first, paired with their deserialized
   * commit metadata; metadata read failures surface as {@link HoodieIOException}.
   */
  private Stream<Pair<HoodieInstant, HoodieCommitMetadata>> getCommitMetadataStream() {
    // NOTE: Streams are lazy — metadata is only read for instants the consumer pulls,
    // so findFirst() on this stream deserializes as few commits as possible.
    return getCommitsTimeline().filterCompletedInstants()
        .getInstantsAsStream()
        .sorted(Comparator.comparing(HoodieInstant::requestedTime).reversed())
        .map(instant -> {
          try {
            HoodieCommitMetadata commitMetadata = readCommitMetadata(instant);
            return Pair.of(instant, commitMetadata);
          } catch (IOException e) {
            // Wrap checked IOException so it can escape the stream pipeline.
            throw new HoodieIOException(String.format("Failed to fetch HoodieCommitMetadata for instant (%s)", instant), e);
          }
        });
  }
  /** Reads the raw bytes of a cleaner instant's metadata file. */
  @Override
  public Option<byte[]> readCleanerInfoAsBytes(HoodieInstant instant) {
    // Cleaner metadata are always stored only in timeline .hoodie
    return readDataFromPath(getInstantFileNamePath(instantFileNameGenerator.getFileName(instant)));
  }

  //-----------------------------------------------------------------
  // BEGIN - COMPACTION RELATED META-DATA MANAGEMENT.
  //-----------------------------------------------------------------

  /** Reads the raw bytes of a compaction plan instant file. */
  @Override
  public Option<byte[]> readCompactionPlanAsBytes(HoodieInstant instant) {
    // NOTE(review): builds the path from getTimelinePath() directly, while sibling
    // readers use getInstantFileNamePath(...) — presumably equivalent; confirm before unifying.
    return readDataFromPath(new StoragePath(metaClient.getTimelinePath(), instantFileNameGenerator.getFileName(instant)));
  }
/**
 * Reverts an inflight instant back to the requested state.
 *
 * <p>For the legacy (null) timeline layout the inflight file is renamed back to the
 * requested file via {@link #transitionState}; for newer layouts the requested file
 * already exists separately, so the inflight marker is simply deleted.
 *
 * @param inflightInstant instant currently in INFLIGHT state
 * @return the corresponding REQUESTED instant
 */
@Override
public HoodieInstant revertInstantFromInflightToRequested(HoodieInstant inflightInstant) {
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant requestedInstant =
      instantGenerator.createNewInstant(HoodieInstant.State.REQUESTED, inflightInstant.getAction(), inflightInstant.requestedTime());
  if (metaClient.getTimelineLayoutVersion().isNullVersion()) {
    // Pass empty data since it is read from the corresponding .aux/.compaction instant file
    transitionState(inflightInstant, requestedInstant, Option.empty());
  } else {
    deleteInflight(inflightInstant);
  }
  return requestedInstant;
}

/**
 * Reverts an inflight log-compaction instant back to the requested state.
 * Same mechanics as {@link #revertInstantFromInflightToRequested(HoodieInstant)},
 * but additionally asserts the instant is a log-compaction action.
 */
@Override
public HoodieInstant revertLogCompactionInflightToRequested(HoodieInstant inflightInstant) {
  ValidationUtils.checkArgument(inflightInstant.getAction().equals(HoodieTimeline.LOG_COMPACTION_ACTION));
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant requestedInstant =
      instantGenerator.createNewInstant(HoodieInstant.State.REQUESTED, LOG_COMPACTION_ACTION, inflightInstant.requestedTime());
  if (metaClient.getTimelineLayoutVersion().isNullVersion()) {
    // Pass empty data since it is read from the corresponding .aux/.compaction instant file
    transitionState(inflightInstant, requestedInstant, Option.empty());
  } else {
    deleteInflight(inflightInstant);
  }
  return requestedInstant;
}
/**
 * Transitions a requested compaction instant to the INFLIGHT state.
 *
 * @param requestedInstant compaction instant in REQUESTED state
 * @return the new INFLIGHT compaction instant
 */
@Override
public HoodieInstant transitionCompactionRequestedToInflight(HoodieInstant requestedInstant) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.COMPACTION_ACTION));
  ValidationUtils.checkArgument(requestedInstant.isRequested());
  HoodieInstant inflightInstant =
      instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, COMPACTION_ACTION, requestedInstant.requestedTime());
  // No payload is written for the inflight marker.
  transitionState(requestedInstant, inflightInstant, Option.empty());
  return inflightInstant;
}

/**
 * Transitions a requested log-compaction instant to the INFLIGHT state.
 *
 * @param requestedInstant log-compaction instant in REQUESTED state
 * @return the new INFLIGHT log-compaction instant
 */
@Override
public HoodieInstant transitionLogCompactionRequestedToInflight(HoodieInstant requestedInstant) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.LOG_COMPACTION_ACTION));
  ValidationUtils.checkArgument(requestedInstant.isRequested());
  HoodieInstant inflightInstant =
      instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, LOG_COMPACTION_ACTION, requestedInstant.requestedTime());
  transitionState(requestedInstant, inflightInstant, Option.empty());
  return inflightInstant;
}
/**
 * Completes an inflight compaction. Note the completed instant is recorded with the
 * regular COMMIT action (not COMPACTION), writing the given commit metadata.
 *
 * @param shouldLock ignored in 0.x mode (see inline note)
 */
@Override
public HoodieInstant transitionCompactionInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieCommitMetadata metadata) {
  // Lock is not honored in 0.x mode.
  ValidationUtils.checkArgument(inflightInstant.getAction().equals(HoodieTimeline.COMPACTION_ACTION));
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant commitInstant = instantGenerator.createNewInstant(HoodieInstant.State.COMPLETED, COMMIT_ACTION, inflightInstant.requestedTime());
  transitionState(inflightInstant, commitInstant, Option.of(metadata));
  return commitInstant;
}

/**
 * Completes an inflight log compaction. The completed instant is recorded with the
 * DELTA_COMMIT action, writing the given commit metadata.
 *
 * @param shouldLock ignored in 0.x mode (see inline note)
 */
@Override
public HoodieInstant transitionLogCompactionInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieCommitMetadata metadata) {
  // Lock is not honored in 0.x mode.
  ValidationUtils.checkArgument(inflightInstant.getAction().equals(HoodieTimeline.LOG_COMPACTION_ACTION));
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant commitInstant = instantGenerator.createNewInstant(HoodieInstant.State.COMPLETED, DELTA_COMMIT_ACTION, inflightInstant.requestedTime());
  transitionState(inflightInstant, commitInstant, Option.of(metadata));
  return commitInstant;
}
//-----------------------------------------------------------------
// END - COMPACTION RELATED META-DATA MANAGEMENT
//-----------------------------------------------------------------
/**
 * Completes an inflight clean action, writing the optional clean metadata.
 *
 * @param shouldLock ignored in 0.x mode
 */
@Override
public HoodieInstant transitionCleanInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, Option<HoodieCleanMetadata> metadata) {
  ValidationUtils.checkArgument(inflightInstant.getAction().equals(HoodieTimeline.CLEAN_ACTION));
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant commitInstant = instantGenerator.createNewInstant(HoodieInstant.State.COMPLETED, CLEAN_ACTION, inflightInstant.requestedTime());
  // Then write to timeline
  transitionState(inflightInstant, commitInstant, metadata);
  return commitInstant;
}

/**
 * Completes an inflight clean action and then runs the table-format completion hook
 * with the completed instant.
 */
@Override
public HoodieInstant transitionCleanInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, Option<HoodieCleanMetadata> metadata,
                                                       TableFormatCompletionAction tableFormatCompletionAction) {
  HoodieInstant completedInstant = transitionCleanInflightToComplete(shouldLock, inflightInstant, metadata);
  tableFormatCompletionAction.execute(completedInstant);
  return completedInstant;
}

/**
 * Transitions a requested clean instant to the INFLIGHT state.
 */
@Override
public HoodieInstant transitionCleanRequestedToInflight(HoodieInstant requestedInstant) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.CLEAN_ACTION));
  ValidationUtils.checkArgument(requestedInstant.isRequested());
  HoodieInstant inflight = instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, CLEAN_ACTION, requestedInstant.requestedTime());
  transitionState(requestedInstant, inflight, Option.empty());
  return inflight;
}
/**
 * Completes an inflight rollback action, writing the rollback metadata.
 *
 * @param shouldLock ignored in 0.x mode
 */
@Override
public HoodieInstant transitionRollbackInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieRollbackMetadata metadata) {
  ValidationUtils.checkArgument(inflightInstant.getAction().equals(HoodieTimeline.ROLLBACK_ACTION));
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant commitInstant = instantGenerator.createNewInstant(HoodieInstant.State.COMPLETED, ROLLBACK_ACTION, inflightInstant.requestedTime());
  // Then write to timeline
  transitionState(inflightInstant, commitInstant, Option.of(metadata));
  return commitInstant;
}

/**
 * Completes an inflight rollback and then runs the table-format completion hook.
 */
@Override
public HoodieInstant transitionRollbackInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieRollbackMetadata metadata,
                                                          TableFormatCompletionAction tableFormatCompletionAction) {
  HoodieInstant completedInstant = transitionRollbackInflightToComplete(shouldLock, inflightInstant, metadata);
  tableFormatCompletionAction.execute(completedInstant);
  return completedInstant;
}

/**
 * Transitions a requested rollback instant to the INFLIGHT state.
 */
@Override
public HoodieInstant transitionRollbackRequestedToInflight(HoodieInstant requestedInstant) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.ROLLBACK_ACTION));
  ValidationUtils.checkArgument(requestedInstant.isRequested());
  HoodieInstant inflight = instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, ROLLBACK_ACTION, requestedInstant.requestedTime());
  transitionState(requestedInstant, inflight, Option.empty());
  return inflight;
}
/**
 * Transitions a requested restore instant to the INFLIGHT state.
 *
 * @throws IllegalArgumentException if the instant is not a restore action in REQUESTED state
 */
@Override
public HoodieInstant transitionRestoreRequestedToInflight(HoodieInstant requestedInstant) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.RESTORE_ACTION), "Transition to inflight requested for a restore instant with diff action "
      + requestedInstant);
  ValidationUtils.checkArgument(requestedInstant.isRequested(), "Transition to inflight requested for an instant not in requested state " + requestedInstant);
  HoodieInstant inflight = instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, RESTORE_ACTION, requestedInstant.requestedTime());
  transitionState(requestedInstant, inflight, Option.empty());
  return inflight;
}
/**
 * Transitions a requested replace-commit instant to the INFLIGHT state, writing the
 * optional metadata payload into the inflight file.
 */
@Override
public <T> HoodieInstant transitionReplaceRequestedToInflight(HoodieInstant requestedInstant, Option<T> metadata) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.REPLACE_COMMIT_ACTION));
  ValidationUtils.checkArgument(requestedInstant.isRequested());
  HoodieInstant inflightInstant = instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, REPLACE_COMMIT_ACTION, requestedInstant.requestedTime());
  // Then write to timeline
  transitionState(requestedInstant, inflightInstant, metadata);
  return inflightInstant;
}

/**
 * Transitions a requested clustering instant to the INFLIGHT state. In the 0.x
 * timeline there is no dedicated clustering action, so this delegates to the
 * replace-commit transition.
 */
@Override
public <T> HoodieInstant transitionClusterRequestedToInflight(HoodieInstant requestedInstant, Option<T> metadata) {
  // In 0.x, no separate clustering action, reuse replace action.
  return transitionReplaceRequestedToInflight(requestedInstant, metadata);
}
/**
 * Completes an inflight replace-commit, writing the replace-commit metadata.
 *
 * @param shouldLock ignored in 0.x mode
 */
@Override
public HoodieInstant transitionReplaceInflightToComplete(
    boolean shouldLock, HoodieInstant inflightInstant, HoodieReplaceCommitMetadata metadata) {
  ValidationUtils.checkArgument(inflightInstant.getAction().equals(HoodieTimeline.REPLACE_COMMIT_ACTION));
  ValidationUtils.checkArgument(inflightInstant.isInflight());
  HoodieInstant commitInstant = instantGenerator.createNewInstant(HoodieInstant.State.COMPLETED, REPLACE_COMMIT_ACTION, inflightInstant.requestedTime());
  // Then write to timeline
  transitionState(inflightInstant, commitInstant, Option.of(metadata));
  return commitInstant;
}

/**
 * Completes an inflight replace-commit and then runs the table-format completion hook.
 */
@Override
public HoodieInstant transitionReplaceInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieReplaceCommitMetadata metadata,
                                                         TableFormatCompletionAction tableFormatCompletionAction) {
  HoodieInstant completedInstant = transitionReplaceInflightToComplete(shouldLock, inflightInstant, metadata);
  tableFormatCompletionAction.execute(completedInstant);
  return completedInstant;
}

/**
 * Completes an inflight clustering instant. In 0.x clustering reuses the replace
 * action, so this delegates to the replace-commit completion.
 */
@Override
public HoodieInstant transitionClusterInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieReplaceCommitMetadata metadata) {
  // In 0.x, no separate clustering action, reuse replace action.
  return transitionReplaceInflightToComplete(shouldLock, inflightInstant, metadata);
}

/**
 * Completes an inflight clustering instant and then runs the table-format completion hook.
 */
@Override
public HoodieInstant transitionClusterInflightToComplete(boolean shouldLock, HoodieInstant inflightInstant, HoodieReplaceCommitMetadata metadata,
                                                         TableFormatCompletionAction tableFormatCompletionAction) {
  HoodieInstant completedInstant = transitionClusterInflightToComplete(shouldLock, inflightInstant, metadata);
  tableFormatCompletionAction.execute(completedInstant);
  return completedInstant;
}
/**
 * Convenience overload of
 * {@link #transitionState(HoodieInstant, HoodieInstant, Option, boolean)} that
 * disallows redundant transitions.
 */
private <T> void transitionState(HoodieInstant fromInstant, HoodieInstant toInstant, Option<T> metadata) {
  transitionState(fromInstant, toInstant, metadata, false);
}

/**
 * Moves an instant from one timeline state to the next by manipulating its state file.
 *
 * <p>Legacy (null) layout: the source state file is (re)created with the metadata and
 * then renamed to the target state file. Newer layouts: the source file must already
 * exist, and the target file is created directly — immutably (create fails if it
 * already exists) unless {@code allowRedundantTransitions} is set.
 *
 * @param fromInstant               current state; must share the requested time with {@code toInstant}
 * @param toInstant                 target state
 * @param metadata                  optional payload to write into the target state file
 * @param allowRedundantTransitions when true, overwriting an existing target file is tolerated
 * @throws HoodieIOException if any filesystem operation fails
 */
protected <T> void transitionState(HoodieInstant fromInstant, HoodieInstant toInstant, Option<T> metadata, boolean allowRedundantTransitions) {
  ValidationUtils.checkArgument(fromInstant.requestedTime().equals(toInstant.requestedTime()), String.format("%s and %s are not consistent when transition state.", fromInstant, toInstant));
  try {
    HoodieStorage storage = metaClient.getStorage();
    if (metaClient.getTimelineLayoutVersion().isNullVersion()) {
      // Re-create the .inflight file by opening a new file and write the commit metadata in
      createFileInMetaPath(instantFileNameGenerator.getFileName(fromInstant), metadata, allowRedundantTransitions);
      StoragePath fromInstantPath = getInstantFileNamePath(instantFileNameGenerator.getFileName(fromInstant));
      StoragePath toInstantPath = getInstantFileNamePath(instantFileNameGenerator.getFileName(toInstant));
      boolean success = storage.rename(fromInstantPath, toInstantPath);
      if (!success) {
        throw new HoodieIOException("Could not rename " + fromInstantPath + " to " + toInstantPath);
      }
    } else {
      // Ensures old state exists in timeline
      ValidationUtils.checkArgument(storage.exists(getInstantFileNamePath(instantFileNameGenerator.getFileName(fromInstant))),
          "File " + getInstantFileNamePath(instantFileNameGenerator.getFileName(fromInstant)) + " does not exist!");
      // Use Write Once to create Target File
      if (allowRedundantTransitions) {
        FileIOUtils.createFileInPath(storage, getInstantFileNamePath(instantFileNameGenerator.getFileName(toInstant)), getHoodieInstantWriterOption(this, metadata));
      } else {
        storage.createImmutableFileInPath(getInstantFileNamePath(instantFileNameGenerator.getFileName(toInstant)), getHoodieInstantWriterOption(this, metadata));
      }
      // Fixed garbled log message: was "Create new file for toInstant ?{}", which
      // rendered a stray '?' before the SLF4J placeholder.
      LOG.info("Created new file for toInstant {}", getInstantFileNamePath(instantFileNameGenerator.getFileName(toInstant)));
    }
  } catch (IOException e) {
    throw new HoodieIOException("Could not complete " + fromInstant, e);
  }
}
/**
 * Reverts a COMPLETED instant back to the INFLIGHT state.
 *
 * <p>Legacy (null) layout: the completed file is renamed back to the inflight file
 * (skipped if an inflight file is already present). Newer layouts: empty requested
 * and inflight marker files are created if missing, then the completed file is
 * deleted.
 *
 * @param completed completed instant to revert; must share its requested time with {@code inflight}
 * @param inflight  the inflight instant to revert to
 * @throws HoodieIOException if any filesystem operation fails
 */
protected void revertCompleteToInflight(HoodieInstant completed, HoodieInstant inflight) {
  ValidationUtils.checkArgument(completed.requestedTime().equals(inflight.requestedTime()));
  StoragePath inFlightCommitFilePath = getInstantFileNamePath(instantFileNameGenerator.getFileName(inflight));
  StoragePath commitFilePath = getInstantFileNamePath(instantFileNameGenerator.getFileName(completed));
  try {
    if (metaClient.getTimelineLayoutVersion().isNullVersion()) {
      if (!metaClient.getStorage().exists(inFlightCommitFilePath)) {
        boolean success = metaClient.getStorage().rename(commitFilePath, inFlightCommitFilePath);
        if (!success) {
          throw new HoodieIOException(
              "Could not rename " + commitFilePath + " to " + inFlightCommitFilePath);
        }
      }
    } else {
      StoragePath requestedInstantFilePath = getInstantFileNamePath(
          instantFileNameGenerator.getFileName(instantGenerator.createNewInstant(HoodieInstant.State.REQUESTED,
              inflight.getAction(), inflight.requestedTime())));
      // If inflight and requested files do not exist, create one
      if (!metaClient.getStorage().exists(requestedInstantFilePath)) {
        metaClient.getStorage().create(requestedInstantFilePath, false).close();
      }
      if (!metaClient.getStorage().exists(inFlightCommitFilePath)) {
        metaClient.getStorage().create(inFlightCommitFilePath, false).close();
      }
      // Removing the completed file is what actually reverts the state.
      boolean success = metaClient.getStorage().deleteFile(commitFilePath);
      ValidationUtils.checkArgument(success, "State Reverting failed");
    }
  } catch (IOException e) {
    throw new HoodieIOException("Could not complete revert " + completed, e);
  }
}
/**
 * Resolves the parent folder for a timeline file: schema-commit files live in the
 * schema folder, everything else under the timeline (.hoodie) path.
 */
private StoragePath getInstantFileNamePath(String fileName) {
  final String parentDir;
  if (fileName.contains(SCHEMA_COMMIT_ACTION)) {
    parentDir = metaClient.getSchemaFolderName();
  } else {
    parentDir = metaClient.getTimelinePath().toString();
  }
  return new StoragePath(parentDir, fileName);
}
/**
 * Transitions a requested instant of the given action type to INFLIGHT.
 *
 * @param commitType      the action type of the instant
 * @param inFlightInstant the instant timestamp — NOTE(review): despite the name this
 *                        is the requested instant's time, not an inflight instant;
 *                        consider renaming when touching this API.
 */
@Override
public void transitionRequestedToInflight(String commitType, String inFlightInstant) {
  HoodieInstant requested = instantGenerator.createNewInstant(HoodieInstant.State.REQUESTED, commitType, inFlightInstant);
  transitionRequestedToInflight(requested, Option.empty(), false);
}

/**
 * Transitions a requested instant to INFLIGHT, writing the optional metadata payload.
 */
@Override
public <T> void transitionRequestedToInflight(HoodieInstant requested, Option<T> metadata) {
  transitionRequestedToInflight(requested, metadata, false);
}

/**
 * Transitions a requested instant to INFLIGHT.
 *
 * @param allowRedundantTransitions when true, an already-existing inflight file is tolerated
 * @throws IllegalArgumentException if the instant is not in REQUESTED state
 */
@Override
public <T> void transitionRequestedToInflight(
    HoodieInstant requested, Option<T> metadata, boolean allowRedundantTransitions) {
  HoodieInstant inflight = instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, requested.getAction(), requested.requestedTime());
  ValidationUtils.checkArgument(requested.isRequested(), "Instant " + requested + " in wrong state");
  transitionState(requested, inflight, metadata, allowRedundantTransitions);
}
/**
 * Persists a compaction plan for a requested compaction instant (no overwrite).
 */
@Override
public void saveToCompactionRequested(HoodieInstant instant, HoodieCompactionPlan metadata) {
  saveToCompactionRequested(instant, metadata, false);
}

/**
 * Persists a compaction plan for a requested compaction instant.
 *
 * @param overwrite when true, an existing plan file is replaced
 */
@Override
public void saveToCompactionRequested(HoodieInstant instant, HoodieCompactionPlan metadata, boolean overwrite) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.COMPACTION_ACTION));
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(metadata), overwrite);
}

/**
 * Persists a log-compaction plan for a requested log-compaction instant (no overwrite).
 */
@Override
public void saveToLogCompactionRequested(HoodieInstant instant, HoodieCompactionPlan metadata) {
  saveToLogCompactionRequested(instant, metadata, false);
}

/**
 * Persists a log-compaction plan for a requested log-compaction instant.
 *
 * @param overwrite when true, an existing plan file is replaced
 */
@Override
public void saveToLogCompactionRequested(HoodieInstant instant, HoodieCompactionPlan metadata, boolean overwrite) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.LOG_COMPACTION_ACTION));
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(metadata), overwrite);
}

/**
 * Persists the requested-replace metadata for a pending replace-commit instant.
 */
@Override
public void saveToPendingReplaceCommit(HoodieInstant instant, HoodieRequestedReplaceMetadata metadata) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.REPLACE_COMMIT_ACTION));
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(metadata), false);
}

/**
 * Persists the requested-replace metadata for a pending clustering instant. In 0.x
 * clustering reuses the replace action, so this delegates to
 * {@link #saveToPendingReplaceCommit(HoodieInstant, HoodieRequestedReplaceMetadata)}.
 */
@Override
public void saveToPendingClusterCommit(HoodieInstant instant, HoodieRequestedReplaceMetadata metadata) {
  // In 0.x, no separate clustering action, reuse replace action.
  saveToPendingReplaceCommit(instant, metadata);
}
/**
 * Persists an optional cleaner plan for a REQUESTED clean instant.
 */
@Override
public void saveToCleanRequested(HoodieInstant instant, Option<HoodieCleanerPlan> metadata) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.CLEAN_ACTION));
  ValidationUtils.checkArgument(instant.getState().equals(HoodieInstant.State.REQUESTED));
  // Plan is stored in meta path
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), metadata, false);
}

/**
 * Persists a rollback plan for a REQUESTED rollback instant.
 */
@Override
public void saveToRollbackRequested(HoodieInstant instant, HoodieRollbackPlan metadata) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.ROLLBACK_ACTION));
  ValidationUtils.checkArgument(instant.getState().equals(HoodieInstant.State.REQUESTED));
  // Plan is stored in meta path
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(metadata), false);
}

/**
 * Persists a restore plan for a REQUESTED restore instant.
 */
@Override
public void saveToRestoreRequested(HoodieInstant instant, HoodieRestorePlan metadata) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.RESTORE_ACTION));
  ValidationUtils.checkArgument(instant.getState().equals(HoodieInstant.State.REQUESTED));
  // Plan is stored in meta path
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(metadata), false);
}
/**
 * Transitions a requested indexing instant to the INFLIGHT state.
 *
 * @throws IllegalArgumentException if the instant is not an indexing action in REQUESTED state
 */
@Override
public HoodieInstant transitionIndexRequestedToInflight(HoodieInstant requestedInstant) {
  ValidationUtils.checkArgument(requestedInstant.getAction().equals(HoodieTimeline.INDEXING_ACTION),
      String.format("%s is not equal to %s action", requestedInstant.getAction(), INDEXING_ACTION));
  ValidationUtils.checkArgument(requestedInstant.isRequested(),
      String.format("Instant %s not in requested state", requestedInstant.requestedTime()));
  HoodieInstant inflightInstant = instantGenerator.createNewInstant(HoodieInstant.State.INFLIGHT, INDEXING_ACTION, requestedInstant.requestedTime());
  transitionState(requestedInstant, inflightInstant, Option.empty());
  return inflightInstant;
}

/**
 * Persists an index plan for a pending indexing instant.
 */
@Override
public void saveToPendingIndexAction(HoodieInstant instant, HoodieIndexPlan metadata) {
  ValidationUtils.checkArgument(instant.getAction().equals(HoodieTimeline.INDEXING_ACTION),
      String.format("%s is not equal to %s action", instant.getAction(), INDEXING_ACTION));
  createFileInMetaPath(instantFileNameGenerator.getFileName(instant), Option.of(metadata), false);
}
/**
 * Creates a timeline file for the given name, serializing the optional metadata into it.
 *
 * <p>When {@code allowOverwrite} is set, or under the legacy (null) timeline layout,
 * an existing file is replaced; otherwise the file is created immutably and creation
 * fails if it already exists.
 */
public <T> void createFileInMetaPath(String filename, Option<T> metadata, boolean allowOverwrite) {
  StoragePath fullPath = getInstantFileNamePath(filename);
  Option<HoodieInstantWriter> writerOption = getHoodieInstantWriterOption(this, metadata);
  if (allowOverwrite || metaClient.getTimelineLayoutVersion().isNullVersion()) {
    FileIOUtils.createFileInPath(metaClient.getStorage(metaClient.getTimelinePath()), fullPath, writerOption);
  } else {
    metaClient.getStorage(metaClient.getTimelinePath()).createImmutableFileInPath(fullPath, writerOption);
  }
}
/**
 * Opens an input stream over the given timeline file.
 *
 * <p>The caller owns the returned stream and must close it.
 *
 * @throws HoodieIOException if the file cannot be opened
 */
protected InputStream readDataStreamFromPath(StoragePath filePath) {
  try {
    return metaClient.getStorage().open(filePath);
  } catch (IOException e) {
    throw new HoodieIOException("Could not read commit details from " + filePath, e);
  }
}
/**
 * Returns a freshly-loaded copy of this active timeline, re-reading state from storage.
 */
@Override
public HoodieActiveTimeline reload() {
  return new ActiveTimelineV1(metaClient);
}
/**
 * Copies the state file of the given instant into {@code dstDir}, creating the
 * destination directory if needed. The copy does not overwrite an existing file
 * and does not delete the source.
 *
 * @throws HoodieIOException if the copy fails
 */
@Override
public void copyInstant(HoodieInstant instant, StoragePath dstDir) {
  StoragePath srcPath = new StoragePath(metaClient.getTimelinePath(), instantFileNameGenerator.getFileName(instant));
  StoragePath dstPath = new StoragePath(dstDir, instantFileNameGenerator.getFileName(instant));
  try {
    HoodieStorage storage = metaClient.getStorage();
    storage.createDirectory(dstDir);
    FileIOUtils.copy(storage, srcPath, storage, dstPath, false, true);
  } catch (IOException e) {
    throw new HoodieIOException("Could not copy instant from " + srcPath + " to " + dstPath, e);
  }
}
/**
 * No extension filtering is applied by this timeline implementation.
 */
@Override
public Set<String> getValidExtensions() {
  return Collections.emptySet();
}

/**
 * Returns true if the given instant's state file carries no content.
 */
@Override
public boolean isEmpty(HoodieInstant instant) {
  return TimelineUtils.isEmpty(metaClient, instant);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/match_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* The request message for
* [MatchService.ReadIndexDatapoints][google.cloud.aiplatform.v1beta1.MatchService.ReadIndexDatapoints].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest}
*/
public final class ReadIndexDatapointsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest)
ReadIndexDatapointsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReadIndexDatapointsRequest.newBuilder() to construct.
// NOTE(review): protoc-generated code (google/cloud/aiplatform/v1beta1/match_service.proto).
// Do not hand-edit; regenerate from the proto definition instead.
private ReadIndexDatapointsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance constructor: initializes string fields to "" and the repeated
// ids field to an empty lazy string list.
private ReadIndexDatapointsRequest() {
  indexEndpoint_ = "";
  deployedIndexId_ = "";
  ids_ = com.google.protobuf.LazyStringArrayList.emptyList();
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ReadIndexDatapointsRequest();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.MatchServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_ReadIndexDatapointsRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.MatchServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_ReadIndexDatapointsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.class,
          com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.Builder.class);
}
// NOTE(review): protoc-generated accessors for field 1 (index_endpoint).
// Do not hand-edit; regenerate from match_service.proto instead.
public static final int INDEX_ENDPOINT_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private volatile java.lang.Object indexEndpoint_ = "";

/**
 *
 *
 * <pre>
 * Required. The name of the index endpoint.
 * Format:
 * `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
 * </pre>
 *
 * <code>
 * string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The indexEndpoint.
 */
@java.lang.Override
public java.lang.String getIndexEndpoint() {
  java.lang.Object ref = indexEndpoint_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Lazily decode the ByteString and cache the decoded String in the field.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    indexEndpoint_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The name of the index endpoint.
 * Format:
 * `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
 * </pre>
 *
 * <code>
 * string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for indexEndpoint.
 */
@java.lang.Override
public com.google.protobuf.ByteString getIndexEndpointBytes() {
  java.lang.Object ref = indexEndpoint_;
  if (ref instanceof java.lang.String) {
    // Lazily encode the String and cache the ByteString in the field.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    indexEndpoint_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// NOTE(review): protoc-generated accessors for field 2 (deployed_index_id).
// Do not hand-edit; regenerate from match_service.proto instead.
public static final int DEPLOYED_INDEX_ID_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
private volatile java.lang.Object deployedIndexId_ = "";

/**
 *
 *
 * <pre>
 * The ID of the DeployedIndex that will serve the request.
 * </pre>
 *
 * <code>string deployed_index_id = 2;</code>
 *
 * @return The deployedIndexId.
 */
@java.lang.Override
public java.lang.String getDeployedIndexId() {
  java.lang.Object ref = deployedIndexId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Lazily decode the ByteString and cache the decoded String in the field.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    deployedIndexId_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The ID of the DeployedIndex that will serve the request.
 * </pre>
 *
 * <code>string deployed_index_id = 2;</code>
 *
 * @return The bytes for deployedIndexId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getDeployedIndexIdBytes() {
  java.lang.Object ref = deployedIndexId_;
  if (ref instanceof java.lang.String) {
    // Lazily encode the String and cache the ByteString in the field.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    deployedIndexId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// NOTE(review): protoc-generated accessors for repeated field 3 (ids).
// Do not hand-edit; regenerate from match_service.proto instead.
public static final int IDS_FIELD_NUMBER = 3;

@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList ids_ =
    com.google.protobuf.LazyStringArrayList.emptyList();

/**
 *
 *
 * <pre>
 * IDs of the datapoints to be searched for.
 * </pre>
 *
 * <code>repeated string ids = 3;</code>
 *
 * @return A list containing the ids.
 */
public com.google.protobuf.ProtocolStringList getIdsList() {
  return ids_;
}

/**
 *
 *
 * <pre>
 * IDs of the datapoints to be searched for.
 * </pre>
 *
 * <code>repeated string ids = 3;</code>
 *
 * @return The count of ids.
 */
public int getIdsCount() {
  return ids_.size();
}

/**
 *
 *
 * <pre>
 * IDs of the datapoints to be searched for.
 * </pre>
 *
 * <code>repeated string ids = 3;</code>
 *
 * @param index The index of the element to return.
 * @return The ids at the given index.
 */
public java.lang.String getIds(int index) {
  return ids_.get(index);
}

/**
 *
 *
 * <pre>
 * IDs of the datapoints to be searched for.
 * </pre>
 *
 * <code>repeated string ids = 3;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the ids at the given index.
 */
public com.google.protobuf.ByteString getIdsBytes(int index) {
  return ids_.getByteString(index);
}
// NOTE(review): protoc-generated serialization plumbing.
// Do not hand-edit; regenerate from match_service.proto instead.
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required proto2-style fields: always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes non-default fields in field-number order; unknown fields last.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(indexEndpoint_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, indexEndpoint_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deployedIndexId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, deployedIndexId_);
  }
  for (int i = 0; i < ids_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, ids_.getRaw(i));
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized byte size.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(indexEndpoint_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, indexEndpoint_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deployedIndexId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, deployedIndexId_);
  }
  {
    int dataSize = 0;
    for (int i = 0; i < ids_.size(); i++) {
      dataSize += computeStringSizeNoTag(ids_.getRaw(i));
    }
    size += dataSize;
    // One tag byte per repeated-string entry.
    size += 1 * getIdsList().size();
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// NOTE(review): protoc-generated equals/hashCode over all fields plus unknown fields.
// Do not hand-edit; regenerate from match_service.proto instead.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest other =
      (com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest) obj;
  if (!getIndexEndpoint().equals(other.getIndexEndpoint())) return false;
  if (!getDeployedIndexId().equals(other.getDeployedIndexId())) return false;
  if (!getIdsList().equals(other.getIdsList())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + INDEX_ENDPOINT_FIELD_NUMBER;
  hash = (53 * hash) + getIndexEndpoint().hashCode();
  hash = (37 * hash) + DEPLOYED_INDEX_ID_FIELD_NUMBER;
  hash = (53 * hash) + getDeployedIndexId().hashCode();
  if (getIdsCount() > 0) {
    hash = (37 * hash) + IDS_FIELD_NUMBER;
    hash = (53 * hash) + getIdsList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// NOTE(review): protoc-generated static parse entry points (one per input source)
// and builder factories. Do not hand-edit; regenerate from match_service.proto.
public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The request message for
* [MatchService.ReadIndexDatapoints][google.cloud.aiplatform.v1beta1.MatchService.ReadIndexDatapoints].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest)
com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.MatchServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ReadIndexDatapointsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.MatchServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ReadIndexDatapointsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.class,
com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
indexEndpoint_ = "";
deployedIndexId_ = "";
ids_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.MatchServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_ReadIndexDatapointsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest build() {
com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest buildPartial() {
com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest result =
new com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies only the fields whose presence bit is set in the builder into the
    // immutable result message. Bit layout (see clear()/mergeFrom()):
    //   0x00000001 -> index_endpoint, 0x00000002 -> deployed_index_id, 0x00000004 -> ids.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.indexEndpoint_ = indexEndpoint_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.deployedIndexId_ = deployedIndexId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        // Freeze the list before sharing it with the immutable message so later
        // builder mutations cannot leak into an already-built instance.
        ids_.makeImmutable();
        result.ids_ = ids_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest other) {
if (other
== com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest.getDefaultInstance())
return this;
if (!other.getIndexEndpoint().isEmpty()) {
indexEndpoint_ = other.indexEndpoint_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getDeployedIndexId().isEmpty()) {
deployedIndexId_ = other.deployedIndexId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.ids_.isEmpty()) {
if (ids_.isEmpty()) {
ids_ = other.ids_;
bitField0_ |= 0x00000004;
} else {
ensureIdsIsMutable();
ids_.addAll(other.ids_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse loop: reads tagged fields until EOF (tag 0) or an end-group
    // tag. Tags are (field_number << 3) | wire_type, so 10/18/26 are the
    // length-delimited (wire type 2) encodings of fields 1, 2 and 3.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // field 1: index_endpoint (string, must be valid UTF-8)
                indexEndpoint_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // field 2: deployed_index_id (string)
                deployedIndexId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // field 3: ids (repeated string) — one element per occurrence
                java.lang.String s = input.readStringRequireUtf8();
                ensureIdsIsMutable();
                ids_.add(s);
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure so partial state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object indexEndpoint_ = "";
/**
*
*
* <pre>
* Required. The name of the index endpoint.
* Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The indexEndpoint.
*/
public java.lang.String getIndexEndpoint() {
java.lang.Object ref = indexEndpoint_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
indexEndpoint_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the index endpoint.
* Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for indexEndpoint.
*/
public com.google.protobuf.ByteString getIndexEndpointBytes() {
java.lang.Object ref = indexEndpoint_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
indexEndpoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the index endpoint.
* Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The indexEndpoint to set.
* @return This builder for chaining.
*/
public Builder setIndexEndpoint(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
indexEndpoint_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the index endpoint.
* Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearIndexEndpoint() {
indexEndpoint_ = getDefaultInstance().getIndexEndpoint();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the index endpoint.
* Format:
* `projects/{project}/locations/{location}/indexEndpoints/{index_endpoint}`
* </pre>
*
* <code>
* string index_endpoint = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for indexEndpoint to set.
* @return This builder for chaining.
*/
public Builder setIndexEndpointBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
indexEndpoint_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object deployedIndexId_ = "";
/**
*
*
* <pre>
* The ID of the DeployedIndex that will serve the request.
* </pre>
*
* <code>string deployed_index_id = 2;</code>
*
* @return The deployedIndexId.
*/
public java.lang.String getDeployedIndexId() {
java.lang.Object ref = deployedIndexId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
deployedIndexId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The ID of the DeployedIndex that will serve the request.
* </pre>
*
* <code>string deployed_index_id = 2;</code>
*
* @return The bytes for deployedIndexId.
*/
public com.google.protobuf.ByteString getDeployedIndexIdBytes() {
java.lang.Object ref = deployedIndexId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
deployedIndexId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The ID of the DeployedIndex that will serve the request.
* </pre>
*
* <code>string deployed_index_id = 2;</code>
*
* @param value The deployedIndexId to set.
* @return This builder for chaining.
*/
public Builder setDeployedIndexId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
deployedIndexId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The ID of the DeployedIndex that will serve the request.
* </pre>
*
* <code>string deployed_index_id = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearDeployedIndexId() {
deployedIndexId_ = getDefaultInstance().getDeployedIndexId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The ID of the DeployedIndex that will serve the request.
* </pre>
*
* <code>string deployed_index_id = 2;</code>
*
* @param value The bytes for deployedIndexId to set.
* @return This builder for chaining.
*/
public Builder setDeployedIndexIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
deployedIndexId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList ids_ =
com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write guard for the ids list: getIdsList()/buildPartial0() freeze the
    // list, so any mutator must first replace a frozen list with a mutable copy.
    // Also records that the field has been touched (bit 0x00000004).
    private void ensureIdsIsMutable() {
      if (!ids_.isModifiable()) {
        ids_ = new com.google.protobuf.LazyStringArrayList(ids_);
      }
      bitField0_ |= 0x00000004;
    }
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @return A list containing the ids.
*/
public com.google.protobuf.ProtocolStringList getIdsList() {
ids_.makeImmutable();
return ids_;
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @return The count of ids.
*/
public int getIdsCount() {
return ids_.size();
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @param index The index of the element to return.
* @return The ids at the given index.
*/
public java.lang.String getIds(int index) {
return ids_.get(index);
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @param index The index of the value to return.
* @return The bytes of the ids at the given index.
*/
public com.google.protobuf.ByteString getIdsBytes(int index) {
return ids_.getByteString(index);
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @param index The index to set the value at.
* @param value The ids to set.
* @return This builder for chaining.
*/
public Builder setIds(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureIdsIsMutable();
ids_.set(index, value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @param value The ids to add.
* @return This builder for chaining.
*/
public Builder addIds(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureIdsIsMutable();
ids_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @param values The ids to add.
* @return This builder for chaining.
*/
public Builder addAllIds(java.lang.Iterable<java.lang.String> values) {
ensureIdsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, ids_);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearIds() {
ids_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* IDs of the datapoints to be searched for.
* </pre>
*
* <code>repeated string ids = 3;</code>
*
* @param value The bytes of the ids to add.
* @return This builder for chaining.
*/
public Builder addIdsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureIdsIsMutable();
ids_.add(value);
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType();
  // initialized eagerly in the static block when the class loads.
  private static final com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest();
  }
  // Returns the shared immutable default (all fields empty) instance of this message.
  public static com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser used by all parseFrom()/parseDelimitedFrom() overloads. Delegates
  // to Builder.mergeFrom and converts parse failures into
  // InvalidProtocolBufferException carrying the partially-built message.
  private static final com.google.protobuf.Parser<ReadIndexDatapointsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ReadIndexDatapointsRequest>() {
        @java.lang.Override
        public ReadIndexDatapointsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far for callers that inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<ReadIndexDatapointsRequest> parser() {
    return PARSER;
  }
  // Instance-level accessor for the shared parser, required by the Message contract.
  @java.lang.Override
  public com.google.protobuf.Parser<ReadIndexDatapointsRequest> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance (MessageLite contract).
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.ReadIndexDatapointsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== End of generated file ReadIndexDatapointsRequest.java. Next generated file:
// googleapis/google-cloud-java:
// java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/CreateConversationModelEvaluationRequest.java
// (the lines above replace dataset-concatenation residue "|" / "repo | size | path") ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/conversation_model.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [ConversationModels.CreateConversationModelEvaluation][google.cloud.dialogflow.v2.ConversationModels.CreateConversationModelEvaluation]
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest}
*/
public final class CreateConversationModelEvaluationRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest)
CreateConversationModelEvaluationRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateConversationModelEvaluationRequest.newBuilder() to construct.
  // Builder-based constructor; private so instances are only created via
  // newBuilder() or the parser.
  private CreateConversationModelEvaluationRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor used for DEFAULT_INSTANCE; initializes string fields to "".
  private CreateConversationModelEvaluationRequest() {
    parent_ = "";
  }
  // Reflection hook used by the protobuf runtime to create fresh instances
  // without going through a public constructor.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateConversationModelEvaluationRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.ConversationModelProto
.internal_static_google_cloud_dialogflow_v2_CreateConversationModelEvaluationRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.ConversationModelProto
.internal_static_google_cloud_dialogflow_v2_CreateConversationModelEvaluationRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest.class,
com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The conversation model resource name. Format:
* `projects/<Project ID>/locations/<Location
* ID>/conversationModels/<Conversation Model ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The conversation model resource name. Format:
* `projects/<Project ID>/locations/<Location
* ID>/conversationModels/<Conversation Model ID>`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONVERSATION_MODEL_EVALUATION_FIELD_NUMBER = 2;
private com.google.cloud.dialogflow.v2.ConversationModelEvaluation conversationModelEvaluation_;
/**
*
*
* <pre>
* Required. The conversation model evaluation to be created.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the conversationModelEvaluation field is set.
*/
@java.lang.Override
public boolean hasConversationModelEvaluation() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The conversation model evaluation to be created.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The conversationModelEvaluation.
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.ConversationModelEvaluation
getConversationModelEvaluation() {
return conversationModelEvaluation_ == null
? com.google.cloud.dialogflow.v2.ConversationModelEvaluation.getDefaultInstance()
: conversationModelEvaluation_;
}
/**
*
*
* <pre>
* Required. The conversation model evaluation to be created.
* </pre>
*
* <code>
* .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.ConversationModelEvaluationOrBuilder
getConversationModelEvaluationOrBuilder() {
return conversationModelEvaluation_ == null
? com.google.cloud.dialogflow.v2.ConversationModelEvaluation.getDefaultInstance()
: conversationModelEvaluation_;
}
private byte memoizedIsInitialized = -1;
  // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
  // This proto3 message has no required fields, so the first call caches 1 (true).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields to the wire: parent (field 1) only when non-empty
  // (proto3 default elision), conversation_model_evaluation (field 2) only when
  // its presence bit is set, then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getConversationModelEvaluation());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the exact byte size writeTo() will
  // emit; must mirror writeTo()'s field-presence logic. -1 means "not yet computed".
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getConversationModelEvaluation());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise structural equality: parent, presence + value of
  // conversation_model_evaluation, and unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest other =
        (com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasConversationModelEvaluation() != other.hasConversationModelEvaluation()) return false;
    if (hasConversationModelEvaluation()) {
      if (!getConversationModelEvaluation().equals(other.getConversationModelEvaluation()))
        return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(): mixes the descriptor, each set field
  // (keyed by its field number), and unknown fields using the generator's fixed
  // prime multipliers. 0 is reserved to mean "not yet computed".
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasConversationModelEvaluation()) {
      hash = (37 * hash) + CONVERSATION_MODEL_EVALUATION_FIELD_NUMBER;
      hash = (53 * hash) + getConversationModelEvaluation().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * The request message for
   * [ConversationModels.CreateConversationModelEvaluation][google.cloud.dialogflow.v2.ConversationModels.CreateConversationModelEvaluation]
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest)
      com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.ConversationModelProto
          .internal_static_google_cloud_dialogflow_v2_CreateConversationModelEvaluationRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.ConversationModelProto
          .internal_static_google_cloud_dialogflow_v2_CreateConversationModelEvaluationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest.class,
              com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Pre-creates the nested message field builder when the runtime requests eager initialization.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getConversationModelEvaluationFieldBuilder();
      }
    }
    // Resets every field and presence bit and drops any nested builder so this Builder can be reused.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      conversationModelEvaluation_ = null;
      if (conversationModelEvaluationBuilder_ != null) {
        conversationModelEvaluationBuilder_.dispose();
        conversationModelEvaluationBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2.ConversationModelProto
          .internal_static_google_cloud_dialogflow_v2_CreateConversationModelEvaluationRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest build() {
      com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest buildPartial() {
      com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest result =
          new com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bits are set in bitField0_ onto the result message.
    private void buildPartial0(
        com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.conversationModelEvaluation_ =
            conversationModelEvaluationBuilder_ == null
                ? conversationModelEvaluation_
                : conversationModelEvaluationBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest) {
        return mergeFrom(
            (com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest other) {
      if (other
          == com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
              .getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasConversationModelEvaluation()) {
        mergeConversationModelEvaluation(other.getConversationModelEvaluation());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: tag 10 is field 1 (parent string), tag 18 is field 2 (the evaluation
    // message); unrecognized tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getConversationModelEvaluationFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x00000001 = parent, 0x00000002 = conversation_model_evaluation.
    private int bitField0_;
    // Stored as String once decoded; may transiently hold a ByteString (see getParent()).
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The conversation model resource name. Format:
     * `projects/<Project ID>/locations/<Location
     * ID>/conversationModels/<Conversation Model ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model resource name. Format:
     * `projects/<Project ID>/locations/<Location
     * ID>/conversationModels/<Conversation Model ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model resource name. Format:
     * `projects/<Project ID>/locations/<Location
     * ID>/conversationModels/<Conversation Model ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model resource name. Format:
     * `projects/<Project ID>/locations/<Location
     * ID>/conversationModels/<Conversation Model ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model resource name. Format:
     * `projects/<Project ID>/locations/<Location
     * ID>/conversationModels/<Conversation Model ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.cloud.dialogflow.v2.ConversationModelEvaluation conversationModelEvaluation_;
    // Lazily-created builder for the nested message; when non-null it owns the field's value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2.ConversationModelEvaluation,
            com.google.cloud.dialogflow.v2.ConversationModelEvaluation.Builder,
            com.google.cloud.dialogflow.v2.ConversationModelEvaluationOrBuilder>
        conversationModelEvaluationBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the conversationModelEvaluation field is set.
     */
    public boolean hasConversationModelEvaluation() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The conversationModelEvaluation.
     */
    public com.google.cloud.dialogflow.v2.ConversationModelEvaluation
        getConversationModelEvaluation() {
      if (conversationModelEvaluationBuilder_ == null) {
        return conversationModelEvaluation_ == null
            ? com.google.cloud.dialogflow.v2.ConversationModelEvaluation.getDefaultInstance()
            : conversationModelEvaluation_;
      } else {
        return conversationModelEvaluationBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setConversationModelEvaluation(
        com.google.cloud.dialogflow.v2.ConversationModelEvaluation value) {
      if (conversationModelEvaluationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        conversationModelEvaluation_ = value;
      } else {
        conversationModelEvaluationBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setConversationModelEvaluation(
        com.google.cloud.dialogflow.v2.ConversationModelEvaluation.Builder builderForValue) {
      if (conversationModelEvaluationBuilder_ == null) {
        conversationModelEvaluation_ = builderForValue.build();
      } else {
        conversationModelEvaluationBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeConversationModelEvaluation(
        com.google.cloud.dialogflow.v2.ConversationModelEvaluation value) {
      if (conversationModelEvaluationBuilder_ == null) {
        // Merge into an existing non-default value; otherwise adopt the new value outright.
        if (((bitField0_ & 0x00000002) != 0)
            && conversationModelEvaluation_ != null
            && conversationModelEvaluation_
                != com.google.cloud.dialogflow.v2.ConversationModelEvaluation
                    .getDefaultInstance()) {
          getConversationModelEvaluationBuilder().mergeFrom(value);
        } else {
          conversationModelEvaluation_ = value;
        }
      } else {
        conversationModelEvaluationBuilder_.mergeFrom(value);
      }
      if (conversationModelEvaluation_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearConversationModelEvaluation() {
      bitField0_ = (bitField0_ & ~0x00000002);
      conversationModelEvaluation_ = null;
      if (conversationModelEvaluationBuilder_ != null) {
        conversationModelEvaluationBuilder_.dispose();
        conversationModelEvaluationBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dialogflow.v2.ConversationModelEvaluation.Builder
        getConversationModelEvaluationBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getConversationModelEvaluationFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.dialogflow.v2.ConversationModelEvaluationOrBuilder
        getConversationModelEvaluationOrBuilder() {
      if (conversationModelEvaluationBuilder_ != null) {
        return conversationModelEvaluationBuilder_.getMessageOrBuilder();
      } else {
        return conversationModelEvaluation_ == null
            ? com.google.cloud.dialogflow.v2.ConversationModelEvaluation.getDefaultInstance()
            : conversationModelEvaluation_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The conversation model evaluation to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.v2.ConversationModelEvaluation conversation_model_evaluation = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.v2.ConversationModelEvaluation,
            com.google.cloud.dialogflow.v2.ConversationModelEvaluation.Builder,
            com.google.cloud.dialogflow.v2.ConversationModelEvaluationOrBuilder>
        getConversationModelEvaluationFieldBuilder() {
      if (conversationModelEvaluationBuilder_ == null) {
        conversationModelEvaluationBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.v2.ConversationModelEvaluation,
                com.google.cloud.dialogflow.v2.ConversationModelEvaluation.Builder,
                com.google.cloud.dialogflow.v2.ConversationModelEvaluationOrBuilder>(
                getConversationModelEvaluation(), getParentForChildren(), isClean());
        conversationModelEvaluation_ = null;
      }
      return conversationModelEvaluationBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest)
  // Shared immutable default instance, created once in the static initializer and
  // handed out by getDefaultInstance().
  private static final com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest();
  }
  public static com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that materializes messages by streaming bytes into a fresh Builder; on failure
  // it attaches the partially-built message to the thrown exception for diagnostics.
  private static final com.google.protobuf.Parser<CreateConversationModelEvaluationRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateConversationModelEvaluationRequest>() {
        @java.lang.Override
        public CreateConversationModelEvaluationRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers only ever see InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and per-instance accessors both expose the same shared PARSER singleton.
  public static com.google.protobuf.Parser<CreateConversationModelEvaluationRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateConversationModelEvaluationRequest> getParserForType() {
    return PARSER;
  }
  // The per-instance default is the same shared DEFAULT_INSTANCE singleton.
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.CreateConversationModelEvaluationRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,509 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/stub/IntentsStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1.stub;
import static com.google.cloud.dialogflow.cx.v3beta1.IntentsClient.ListIntentsPagedResponse;
import static com.google.cloud.dialogflow.cx.v3beta1.IntentsClient.ListLocationsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3beta1.CreateIntentRequest;
import com.google.cloud.dialogflow.cx.v3beta1.DeleteIntentRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ExportIntentsMetadata;
import com.google.cloud.dialogflow.cx.v3beta1.ExportIntentsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ExportIntentsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.GetIntentRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ImportIntentsMetadata;
import com.google.cloud.dialogflow.cx.v3beta1.ImportIntentsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ImportIntentsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.Intent;
import com.google.cloud.dialogflow.cx.v3beta1.ListIntentsRequest;
import com.google.cloud.dialogflow.cx.v3beta1.ListIntentsResponse;
import com.google.cloud.dialogflow.cx.v3beta1.UpdateIntentRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link IntentsStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (dialogflow.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getIntent:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder();
* intentsSettingsBuilder
* .getIntentSettings()
* .setRetrySettings(
* intentsSettingsBuilder
* .getIntentSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* IntentsStubSettings intentsSettings = intentsSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for importIntents:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
 * intentsSettingsBuilder
 *     .importIntentsOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
* }</pre>
*/
@BetaApi
@Generated("by gapic-generator-java")
public class IntentsStubSettings extends StubSettings<IntentsStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/dialogflow")
          .build();
  // Per-RPC settings objects; read back through the corresponding *Settings() getters below.
  private final PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
      listIntentsSettings;
  private final UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings;
  private final UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings;
  private final UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings;
  private final UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings;
  private final UnaryCallSettings<ImportIntentsRequest, Operation> importIntentsSettings;
  // Long-running-operation settings pair with the unary "start" settings above them.
  private final OperationCallSettings<
          ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
      importIntentsOperationSettings;
  private final UnaryCallSettings<ExportIntentsRequest, Operation> exportIntentsSettings;
  private final OperationCallSettings<
          ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
      exportIntentsOperationSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
  // Tells the paging machinery how to thread page tokens and page sizes through
  // ListIntents requests and how to pull Intent resources out of responses.
  private static final PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent>
      LIST_INTENTS_PAGE_STR_DESC =
          new PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListIntentsRequest injectToken(ListIntentsRequest payload, String token) {
              return ListIntentsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListIntentsRequest injectPageSize(ListIntentsRequest payload, int pageSize) {
              return ListIntentsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListIntentsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListIntentsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Intent> extractResources(ListIntentsResponse payload) {
              return payload.getIntentsList();
            }
          };
  // Same paging plumbing as LIST_INTENTS_PAGE_STR_DESC, but for the mixin ListLocations RPC.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }
            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }
            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }
            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }
            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }
            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };
  // Assembles ListIntentsPagedResponse futures from raw list-RPC futures using the
  // page descriptor above.
  private static final PagedListResponseFactory<
          ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
      LIST_INTENTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>() {
            @Override
            public ApiFuture<ListIntentsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListIntentsRequest, ListIntentsResponse> callable,
                ListIntentsRequest request,
                ApiCallContext context,
                ApiFuture<ListIntentsResponse> futureResponse) {
              PageContext<ListIntentsRequest, ListIntentsResponse, Intent> pageContext =
                  PageContext.create(callable, LIST_INTENTS_PAGE_STR_DESC, request, context);
              return ListIntentsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Assembles ListLocationsPagedResponse futures from raw list-RPC futures using the
  // locations page descriptor above.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // ---- Accessors for the per-RPC call settings objects held by this instance. ----
  /** Returns the object with the settings used for calls to listIntents. */
  public PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
      listIntentsSettings() {
    return listIntentsSettings;
  }
  /** Returns the object with the settings used for calls to getIntent. */
  public UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings() {
    return getIntentSettings;
  }
  /** Returns the object with the settings used for calls to createIntent. */
  public UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings() {
    return createIntentSettings;
  }
  /** Returns the object with the settings used for calls to updateIntent. */
  public UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings() {
    return updateIntentSettings;
  }
  /** Returns the object with the settings used for calls to deleteIntent. */
  public UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings() {
    return deleteIntentSettings;
  }
  /** Returns the object with the settings used for calls to importIntents. */
  public UnaryCallSettings<ImportIntentsRequest, Operation> importIntentsSettings() {
    return importIntentsSettings;
  }
  /** Returns the object with the settings used for calls to importIntents. */
  public OperationCallSettings<ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
      importIntentsOperationSettings() {
    return importIntentsOperationSettings;
  }
  /** Returns the object with the settings used for calls to exportIntents. */
  public UnaryCallSettings<ExportIntentsRequest, Operation> exportIntentsSettings() {
    return exportIntentsSettings;
  }
  /** Returns the object with the settings used for calls to exportIntents. */
  public OperationCallSettings<ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
      exportIntentsOperationSettings() {
    return exportIntentsOperationSettings;
  }
  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }
  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }
public IntentsStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(GrpcTransportChannel.getGrpcTransportName())) {
return GrpcIntentsStub.create(this);
}
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonIntentsStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dialogflow";
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }
  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }
  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dialogflow.mtls.googleapis.com:443";
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }
  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }
  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }
  // gRPC is the default transport for this service.
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }
  // Defaults to the gRPC header provider, matching defaultTransportChannelProvider().
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return IntentsStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }
  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
  /**
   * Builds an immutable settings object by freezing every per-RPC settings
   * builder contained in {@code settingsBuilder}.
   */
  protected IntentsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    // Each builder is snapshotted via build(); later mutation of settingsBuilder
    // does not affect this instance.
    listIntentsSettings = settingsBuilder.listIntentsSettings().build();
    getIntentSettings = settingsBuilder.getIntentSettings().build();
    createIntentSettings = settingsBuilder.createIntentSettings().build();
    updateIntentSettings = settingsBuilder.updateIntentSettings().build();
    deleteIntentSettings = settingsBuilder.deleteIntentSettings().build();
    importIntentsSettings = settingsBuilder.importIntentsSettings().build();
    importIntentsOperationSettings = settingsBuilder.importIntentsOperationSettings().build();
    exportIntentsSettings = settingsBuilder.exportIntentsSettings().build();
    exportIntentsOperationSettings = settingsBuilder.exportIntentsOperationSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }
  /** Builder for IntentsStubSettings. */
  public static class Builder extends StubSettings.Builder<IntentsStubSettings, Builder> {
    // Aggregates all unary/paged settings builders so applyToAllUnaryMethods
    // can update them in a single pass.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
        listIntentsSettings;
    private final UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings;
    private final UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings;
    private final UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings;
    private final UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings;
    private final UnaryCallSettings.Builder<ImportIntentsRequest, Operation> importIntentsSettings;
    private final OperationCallSettings.Builder<
            ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
        importIntentsOperationSettings;
    private final UnaryCallSettings.Builder<ExportIntentsRequest, Operation> exportIntentsSettings;
    private final OperationCallSettings.Builder<
            ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
        exportIntentsOperationSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
    // Status codes considered retryable, keyed by retry policy name.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      // Policy 0: retry only on UNAVAILABLE.
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Retry backoff/timeout parameters, keyed by retry policy name.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);
      listIntentsSettings = PagedCallSettings.newBuilder(LIST_INTENTS_PAGE_STR_FACT);
      getIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      importIntentsOperationSettings = OperationCallSettings.newBuilder();
      exportIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      exportIntentsOperationSettings = OperationCallSettings.newBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      // Note: operation (LRO) settings builders are intentionally excluded here;
      // only unary/paged builders participate in applyToAllUnaryMethods.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listIntentsSettings,
              getIntentSettings,
              createIntentSettings,
              updateIntentSettings,
              deleteIntentSettings,
              importIntentsSettings,
              exportIntentsSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    protected Builder(IntentsStubSettings settings) {
      super(settings);
      listIntentsSettings = settings.listIntentsSettings.toBuilder();
      getIntentSettings = settings.getIntentSettings.toBuilder();
      createIntentSettings = settings.createIntentSettings.toBuilder();
      updateIntentSettings = settings.updateIntentSettings.toBuilder();
      deleteIntentSettings = settings.deleteIntentSettings.toBuilder();
      importIntentsSettings = settings.importIntentsSettings.toBuilder();
      importIntentsOperationSettings = settings.importIntentsOperationSettings.toBuilder();
      exportIntentsSettings = settings.exportIntentsSettings.toBuilder();
      exportIntentsOperationSettings = settings.exportIntentsOperationSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listIntentsSettings,
              getIntentSettings,
              createIntentSettings,
              updateIntentSettings,
              deleteIntentSettings,
              importIntentsSettings,
              exportIntentsSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    private static Builder initDefaults(Builder builder) {
      // All unary/paged methods share retry policy 0 (retry on UNAVAILABLE only).
      builder
          .listIntentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .createIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .updateIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .deleteIntentSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .importIntentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .exportIntentsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      // Long-running operation defaults: initial call retries like policy 0,
      // polling uses a 5s..45s backoff with a 300s total timeout.
      builder
          .importIntentsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<ImportIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ImportIntentsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(ImportIntentsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));
      builder
          .exportIntentsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<ExportIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(ExportIntentsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(ExportIntentsMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));
      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the list of unary/paged settings builders managed by this Builder. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listIntents. */
    public PagedCallSettings.Builder<
            ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>
        listIntentsSettings() {
      return listIntentsSettings;
    }

    /** Returns the builder for the settings used for calls to getIntent. */
    public UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings() {
      return getIntentSettings;
    }

    /** Returns the builder for the settings used for calls to createIntent. */
    public UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings() {
      return createIntentSettings;
    }

    /** Returns the builder for the settings used for calls to updateIntent. */
    public UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings() {
      return updateIntentSettings;
    }

    /** Returns the builder for the settings used for calls to deleteIntent. */
    public UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings() {
      return deleteIntentSettings;
    }

    /** Returns the builder for the settings used for calls to importIntents. */
    public UnaryCallSettings.Builder<ImportIntentsRequest, Operation> importIntentsSettings() {
      return importIntentsSettings;
    }

    /** Returns the builder for the settings used for calls to importIntents (long-running op). */
    public OperationCallSettings.Builder<
            ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
        importIntentsOperationSettings() {
      return importIntentsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to exportIntents. */
    public UnaryCallSettings.Builder<ExportIntentsRequest, Operation> exportIntentsSettings() {
      return exportIntentsSettings;
    }

    /** Returns the builder for the settings used for calls to exportIntents (long-running op). */
    public OperationCallSettings.Builder<
            ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
        exportIntentsOperationSettings() {
      return exportIntentsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public IntentsStubSettings build() throws IOException {
      return new IntentsStubSettings(this);
    }
  }
}
|
apache/hadoop | 35,311 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/FileIoProvider.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.HardLink;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetUtil;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.io.nativeio.NativeIOException;
import org.apache.hadoop.net.SocketOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.Flushable;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.file.CopyOption;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import static org.apache.hadoop.hdfs.server.datanode.FileIoProvider.OPERATION.*;
/**
* This class abstracts out various file IO operations performed by the
* DataNode and invokes profiling (for collecting stats) and fault injection
* (for testing) event hooks before and after each file IO.
*
* Behavior can be injected into these events by enabling the
* profiling and/or fault injection event hooks through
* {@link DFSConfigKeys#DFS_DATANODE_FILEIO_PROFILING_SAMPLING_PERCENTAGE_KEY}
* and {@link DFSConfigKeys#DFS_DATANODE_ENABLE_FILEIO_FAULT_INJECTION_KEY}.
* These event hooks are disabled by default.
*
* Most functions accept an optional {@link FsVolumeSpi} parameter for
* instrumentation/logging.
*
* Some methods may look redundant, especially the multiple variations of
* move/rename/list. They exist to retain behavior compatibility for existing
* code.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class FileIoProvider {
  public static final Logger LOG = LoggerFactory.getLogger(
      FileIoProvider.class);
  // Hook invoked around every file IO for stats collection (profiling).
  private final ProfilingFileIoEvents profilingEventHook;
  // Hook invoked before each file IO to optionally inject faults (testing).
  private final FaultInjectorFileIoEvents faultInjectorEventHook;
  // Owning DataNode; used for IO-error-based volume checker callbacks.
  private final DataNode datanode;
  // Width of an int in bytes. NOTE(review): not referenced in this chunk;
  // presumably used elsewhere in the class — confirm before removing.
  private static final int LEN_INT = 4;
  /**
   * @param conf Configuration object. May be null. When null,
   *             the event handlers are no-ops.
   * @param datanode datanode that owns this FileIoProvider. Used for
   *                 IO error based volume checker callback
   */
  public FileIoProvider(@Nullable Configuration conf,
      final DataNode datanode) {
    profilingEventHook = new ProfilingFileIoEvents(conf);
    faultInjectorEventHook = new FaultInjectorFileIoEvents(conf);
    this.datanode = datanode;
  }
/**
* Lists the types of file system operations. Passed to the
* IO hooks so implementations can choose behavior based on
* specific operations.
*/
public enum OPERATION {
OPEN,
EXISTS,
LIST,
DELETE,
MOVE,
MKDIRS,
TRANSFER,
SYNC,
FADVISE,
READ,
WRITE,
FLUSH,
NATIVE_COPY
}
  /**
   * See {@link Flushable#flush()}.
   *
   * The profiling hook brackets the flush for timing; the fault-injection
   * hook runs first so tests can force a failure.
   *
   * @param volume target volume. null if unavailable.
   * @throws IOException if the underlying flush fails.
   */
  public void flush(
      @Nullable FsVolumeSpi volume, Flushable f) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, FLUSH, 0);
    try {
      faultInjectorEventHook.beforeFileIo(volume, FLUSH, 0);
      f.flush();
      profilingEventHook.afterFileIo(volume, FLUSH, begin, 0);
    } catch (Exception e) {
      // Record the failure (onFailure is defined elsewhere in this class),
      // then propagate to the caller.
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Sync the given {@link FileOutputStream}.
   *
   * Note: fsync's the stream's channel without flushing metadata
   * (second argument to {@link IOUtils#fsync} is false).
   *
   * @param volume target volume. null if unavailable.
   * @throws IOException if the sync fails.
   */
  public void sync(
      @Nullable FsVolumeSpi volume, FileOutputStream fos) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, SYNC, 0);
    try {
      faultInjectorEventHook.beforeFileIo(volume, SYNC, 0);
      IOUtils.fsync(fos.getChannel(), false);
      profilingEventHook.afterFileIo(volume, SYNC, begin, 0);
    } catch (Exception e) {
      // Record the failure before rethrowing.
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Sync the given directory changes to durable device.
   *
   * @param volume target volume. null if unavailable.
   * @param dir directory whose entries should be made durable.
   * @throws IOException if the directory sync fails.
   */
  public void dirSync(@Nullable FsVolumeSpi volume, File dir)
      throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, SYNC, 0);
    try {
      faultInjectorEventHook.beforeFileIo(volume, SYNC, 0);
      IOUtils.fsync(dir);
      profilingEventHook.afterFileIo(volume, SYNC, begin, 0);
    } catch (Exception e) {
      // Record the failure before rethrowing.
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Call sync_file_range on the given file descriptor.
   *
   * Uses the "if possible" native wrapper, so on platforms without
   * sync_file_range this is a no-op rather than an error.
   *
   * @param volume target volume. null if unavailable.
   * @throws NativeIOException if the native call fails.
   */
  public void syncFileRange(
      @Nullable FsVolumeSpi volume, FileDescriptor outFd,
      long offset, long numBytes, int flags) throws NativeIOException {
    final long begin = profilingEventHook.beforeFileIo(volume, SYNC, 0);
    try {
      faultInjectorEventHook.beforeFileIo(volume, SYNC, 0);
      NativeIO.POSIX.syncFileRangeIfPossible(outFd, offset, numBytes, flags);
      profilingEventHook.afterFileIo(volume, SYNC, begin, 0);
    } catch (Exception e) {
      // Record the failure before rethrowing.
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Call posix_fadvise on the given file descriptor.
   *
   * Treated as a metadata operation by the hooks (no byte count).
   *
   * @param volume target volume. null if unavailable.
   * @throws NativeIOException if the native call fails.
   */
  public void posixFadvise(
      @Nullable FsVolumeSpi volume, String identifier, FileDescriptor outFd,
      long offset, long length, int flags) throws NativeIOException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, FADVISE);
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, FADVISE);
      NativeIO.POSIX.getCacheManipulator().posixFadviseIfPossible(
          identifier, outFd, offset, length, flags);
      profilingEventHook.afterMetadataOp(volume, FADVISE, begin);
    } catch (Exception e) {
      // Record the failure before rethrowing.
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Delete a file.
   *
   * A false return (delete failed) is reported to the caller but is not
   * treated as a volume failure; only thrown exceptions trigger onFailure.
   *
   * @param volume target volume. null if unavailable.
   * @param f File to delete.
   * @return true if the file was successfully deleted.
   */
  public boolean delete(@Nullable FsVolumeSpi volume, File f) {
    final long begin = profilingEventHook.beforeMetadataOp(volume, DELETE);
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, DELETE);
      boolean deleted = f.delete();
      profilingEventHook.afterMetadataOp(volume, DELETE, begin);
      return deleted;
    } catch (Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Delete a file, first checking to see if it exists.
   *
   * @param volume target volume. null if unavailable.
   * @param f File to delete
   * @return true if the file was successfully deleted or if it never
   *         existed.
   */
  public boolean deleteWithExistsCheck(@Nullable FsVolumeSpi volume, File f) {
    final long begin = profilingEventHook.beforeMetadataOp(volume, DELETE);
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, DELETE);
      // Short-circuit: a non-existent file counts as a successful delete.
      boolean deleted = !f.exists() || f.delete();
      profilingEventHook.afterMetadataOp(volume, DELETE, begin);
      if (!deleted) {
        LOG.warn("Failed to delete file {}", f);
      }
      return deleted;
    } catch (Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
/**
* Transfer data from a FileChannel to a SocketOutputStream.
*
* @param volume target volume. null if unavailable.
* @param sockOut SocketOutputStream to write the data.
* @param fileCh FileChannel from which to read data.
* @param position position within the channel where the transfer begins.
* @param count number of bytes to transfer.
* @param waitTime returns the nanoseconds spent waiting for the socket
* to become writable.
* @param transferTime returns the nanoseconds spent transferring data.
* @throws IOException
*/
public void transferToSocketFully(
@Nullable FsVolumeSpi volume, SocketOutputStream sockOut,
FileChannel fileCh, long position, int count,
LongWritable waitTime, LongWritable transferTime) throws IOException {
final long begin = profilingEventHook.beforeFileIo(volume, TRANSFER, count);
try {
faultInjectorEventHook.beforeFileIo(volume, TRANSFER, count);
sockOut.transferToFully(fileCh, position, count,
waitTime, transferTime);
profilingEventHook.afterFileIo(volume, TRANSFER, begin, count);
} catch (Exception e) {
String em = e.getMessage();
if (em != null) {
if (!em.startsWith("Broken pipe")
&& !em.startsWith("Connection reset")) {
onFailure(volume, begin);
}
} else {
onFailure(volume, begin);
}
throw e;
}
}
  /**
   * Create a file.
   *
   * @param volume target volume. null if unavailable.
   * @param f File to be created.
   * @return true if the file does not exist and was successfully created.
   *         false if the file already exists.
   * @throws IOException if an IO error occurs during creation.
   */
  public boolean createFile(
      @Nullable FsVolumeSpi volume, File f) throws IOException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, OPEN);
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, OPEN);
      boolean created = f.createNewFile();
      profilingEventHook.afterMetadataOp(volume, OPEN, begin);
      return created;
    } catch (Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Create a FileInputStream using
   * {@link FileInputStream#FileInputStream(File)}.
   *
   * Wraps the created input stream to intercept read calls
   * before delegating to the wrapped stream.
   *
   * @param volume target volume. null if unavailable.
   * @param f File object.
   * @return FileInputStream to the given file.
   * @throws FileNotFoundException if the file cannot be opened.
   */
  public FileInputStream getFileInputStream(
      @Nullable FsVolumeSpi volume, File f) throws FileNotFoundException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, OPEN);
    FileInputStream fis = null;
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, OPEN);
      fis = new WrappedFileInputStream(volume, f);
      profilingEventHook.afterMetadataOp(volume, OPEN, begin);
      return fis;
    } catch(Exception e) {
      // Close the stream if the failure happened after it was opened
      // (e.g. in the after-hook) to avoid leaking a descriptor.
      IOUtils.closeStream(fis);
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Create a FileOutputStream using
   * {@link FileOutputStream#FileOutputStream(File, boolean)}.
   *
   * Wraps the created output stream to intercept write calls
   * before delegating to the wrapped stream.
   *
   * @param volume target volume. null if unavailable.
   * @param f File object.
   * @param append if true, then bytes will be written to the end of the
   *               file rather than the beginning.
   * @return FileOutputStream to the given file object.
   * @throws FileNotFoundException if the file cannot be opened for writing.
   */
  public FileOutputStream getFileOutputStream(
      @Nullable FsVolumeSpi volume, File f,
      boolean append) throws FileNotFoundException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, OPEN);
    FileOutputStream fos = null;
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, OPEN);
      fos = new WrappedFileOutputStream(volume, f, append);
      profilingEventHook.afterMetadataOp(volume, OPEN, begin);
      return fos;
    } catch(Exception e) {
      // Close the stream if it was opened before the failure; avoids a leak.
      IOUtils.closeStream(fos);
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Create a FileOutputStream using
   * {@link FileOutputStream#FileOutputStream(File, boolean)}.
   *
   * Convenience overload that opens the file in truncate (non-append) mode.
   *
   * @param volume target volume. null if unavailable.
   * @param f File object.
   * @return FileOutputStream to the given file object.
   * @throws FileNotFoundException if the file cannot be opened for writing.
   */
  public FileOutputStream getFileOutputStream(
      @Nullable FsVolumeSpi volume, File f) throws FileNotFoundException {
    return getFileOutputStream(volume, f, false);
  }
  /**
   * Create a FileOutputStream using
   * {@link FileOutputStream#FileOutputStream(FileDescriptor)}.
   *
   * Wraps the created output stream to intercept write calls
   * before delegating to the wrapped stream. Note this variant does not
   * invoke the OPEN hooks since no file open occurs.
   *
   * @param volume target volume. null if unavailable.
   * @param fd File descriptor object.
   * @return FileOutputStream to the given file object.
   */
  public FileOutputStream getFileOutputStream(
      @Nullable FsVolumeSpi volume, FileDescriptor fd) {
    return new WrappedFileOutputStream(volume, fd);
  }
  /**
   * Create a FileInputStream using
   * {@link NativeIO#getShareDeleteFileDescriptor}.
   * Wraps the created input stream to intercept input calls
   * before delegating to the wrapped stream.
   *
   * @param volume target volume. null if unavailable.
   * @param f File object.
   * @param offset the offset position, measured in bytes from the
   *               beginning of the file, at which to set the file
   *               pointer.
   * @return FileOutputStream to the given file object.
   * @throws IOException if the descriptor cannot be obtained or positioned.
   */
  public FileInputStream getShareDeleteFileInputStream(
      @Nullable FsVolumeSpi volume, File f,
      long offset) throws IOException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, OPEN);
    FileInputStream fis = null;
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, OPEN);
      fis = new WrappedFileInputStream(volume,
          NativeIO.getShareDeleteFileDescriptor(f, offset));
      profilingEventHook.afterMetadataOp(volume, OPEN, begin);
      return fis;
    } catch(Exception e) {
      // Close the stream if it was opened before the failure; avoids a leak.
      IOUtils.closeStream(fis);
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Create a FileInputStream using
   * {@link FileInputStream#FileInputStream(File)} and position
   * it at the given offset.
   *
   * Wraps the created input stream to intercept read calls
   * before delegating to the wrapped stream.
   *
   * @param volume target volume. null if unavailable.
   * @param f File object.
   * @param offset the offset position, measured in bytes from the
   *               beginning of the file, at which to set the file
   *               pointer.
   * @throws IOException if the file cannot be opened or positioned.
   */
  public FileInputStream openAndSeek(
      @Nullable FsVolumeSpi volume, File f, long offset) throws IOException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, OPEN);
    FileInputStream fis = null;
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, OPEN);
      fis = new WrappedFileInputStream(volume,
          FsDatasetUtil.openAndSeek(f, offset));
      profilingEventHook.afterMetadataOp(volume, OPEN, begin);
      return fis;
    } catch(Exception e) {
      // Close the stream if it was opened before the failure; avoids a leak.
      IOUtils.closeStream(fis);
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Create a RandomAccessFile using
   * {@link RandomAccessFile#RandomAccessFile(File, String)}.
   *
   * Wraps the created input stream to intercept IO calls
   * before delegating to the wrapped RandomAccessFile.
   *
   * @param volume target volume. null if unavailable.
   * @param f File object.
   * @param mode See {@link RandomAccessFile} for a description
   *             of the mode string.
   * @return RandomAccessFile representing the given file.
   * @throws FileNotFoundException if the file cannot be opened in the given mode.
   */
  public RandomAccessFile getRandomAccessFile(
      @Nullable FsVolumeSpi volume, File f,
      String mode) throws FileNotFoundException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, OPEN);
    RandomAccessFile raf = null;
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, OPEN);
      raf = new WrappedRandomAccessFile(volume, f, mode);
      profilingEventHook.afterMetadataOp(volume, OPEN, begin);
      return raf;
    } catch(Exception e) {
      // Close the file if it was opened before the failure; avoids a leak.
      IOUtils.closeStream(raf);
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Delete the given directory using {@link FileUtil#fullyDelete(File)}.
   *
   * A false return is only traced, not treated as a volume failure;
   * only thrown exceptions trigger onFailure.
   *
   * @param volume target volume. null if unavailable.
   * @param dir directory to be deleted.
   * @return true on success false on failure.
   */
  public boolean fullyDelete(@Nullable FsVolumeSpi volume, File dir) {
    final long begin = profilingEventHook.beforeMetadataOp(volume, DELETE);
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, DELETE);
      boolean deleted = FileUtil.fullyDelete(dir);
      LOG.trace("Deletion of dir {} {}", dir, deleted ? "succeeded" : "failed");
      profilingEventHook.afterMetadataOp(volume, DELETE, begin);
      return deleted;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * Move the src file to the target using
   * {@link FileUtil#replaceFile(File, File)}.
   *
   * @param volume target volume. null if unavailable.
   * @param src source path.
   * @param target target path.
   * @throws IOException if the replace fails.
   */
  public void replaceFile(
      @Nullable FsVolumeSpi volume, File src, File target) throws IOException {
    final long begin = profilingEventHook.beforeMetadataOp(volume, MOVE);
    try {
      faultInjectorEventHook.beforeMetadataOp(volume, MOVE);
      FileUtil.replaceFile(src, target);
      profilingEventHook.afterMetadataOp(volume, MOVE, begin);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
/**
 * Rename {@code src} to {@code target} using
 * {@link Storage#rename(File, File)}, instrumented as a MOVE
 * metadata operation.
 *
 * @param volume target volume. null if unavailable.
 * @param src source path.
 * @param target target path.
 * @throws IOException if the rename fails.
 */
public void rename(
    @Nullable FsVolumeSpi volume, File src, File target)
    throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, MOVE);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, MOVE);
    Storage.rename(src, target);
    profilingEventHook.afterMetadataOp(volume, MOVE, startTime);
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * Move {@code src} to {@code target} using
 * {@link FileUtils#moveFile(File, File)}, instrumented as a MOVE
 * metadata operation.
 *
 * @param volume target volume. null if unavailable.
 * @param src source path.
 * @param target target path.
 * @throws IOException if the move fails.
 */
public void moveFile(
    @Nullable FsVolumeSpi volume, File src, File target)
    throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, MOVE);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, MOVE);
    FileUtils.moveFile(src, target);
    profilingEventHook.afterMetadataOp(volume, MOVE, startTime);
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * Move {@code src} to {@code target} using
 * {@link Files#move(Path, Path, CopyOption...)}, instrumented as a
 * MOVE metadata operation.
 *
 * @param volume target volume. null if unavailable.
 * @param src source path.
 * @param target target path.
 * @param options See {@link Files#move} for a description
 *                of the options.
 * @throws IOException if the move fails.
 */
public void move(
    @Nullable FsVolumeSpi volume, Path src, Path target,
    CopyOption... options) throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, MOVE);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, MOVE);
    Files.move(src, target, options);
    profilingEventHook.afterMetadataOp(volume, MOVE, startTime);
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * Copy {@code src} to {@code target} using
 * {@link Storage#nativeCopyFileUnbuffered(File, File, boolean)},
 * instrumented as a NATIVE_COPY file IO operation sized by the
 * source file's length.
 *
 * @param volume target volume. null if unavailable.
 * @param src an existing file to copy, must not be {@code null}
 * @param target the new file, must not be {@code null}
 * @param preserveFileDate true if the file date of the copy
 *                         should be the same as the original
 * @throws IOException if the copy fails.
 */
public void nativeCopyFileUnbuffered(
    @Nullable FsVolumeSpi volume, File src, File target,
    boolean preserveFileDate) throws IOException {
  // The byte count reported to the hooks is the source length,
  // captured once up front.
  final long numBytes = src.length();
  final long startTime = profilingEventHook.beforeFileIo(volume, NATIVE_COPY,
      numBytes);
  try {
    faultInjectorEventHook.beforeFileIo(volume, NATIVE_COPY, numBytes);
    Storage.nativeCopyFileUnbuffered(src, target, preserveFileDate);
    profilingEventHook.afterFileIo(volume, NATIVE_COPY, startTime, numBytes);
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * Create the given directory via {@link File#mkdirs()}, instrumented
 * as a MKDIRS metadata operation.
 *
 * @param volume target volume. null if unavailable.
 * @param dir directory to be created.
 * @return true only if the directory was created. false if
 *         the directory already exists.
 * @throws IOException if a directory with the given name does
 *         not exist and could not be created.
 */
public boolean mkdirs(
    @Nullable FsVolumeSpi volume, File dir) throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, MKDIRS);
  boolean created = false;
  boolean preExisting;
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, MKDIRS);
    created = dir.mkdirs();
    // mkdirs() returns false both on failure and when the directory
    // already exists; distinguish the two with an isDirectory() probe.
    preExisting = !created && dir.isDirectory();
    profilingEventHook.afterMetadataOp(volume, MKDIRS, startTime);
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
  if (!created && !preExisting) {
    throw new IOException("Mkdirs failed to create " + dir);
  }
  return created;
}
/**
 * Create the target directory using {@link File#mkdirs()} only if
 * it doesn't exist already, instrumented as a MKDIRS metadata
 * operation.
 *
 * @param volume target volume. null if unavailable.
 * @param dir directory to be created.
 * @throws IOException if the directory could not created
 */
public void mkdirsWithExistsCheck(
    @Nullable FsVolumeSpi volume, File dir) throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, MKDIRS);
  boolean ok = false;
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, MKDIRS);
    // Short-circuit: an existing directory counts as success.
    ok = dir.isDirectory() || dir.mkdirs();
    profilingEventHook.afterMetadataOp(volume, MKDIRS, startTime);
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
  if (!ok) {
    throw new IOException("Mkdirs failed to create " + dir);
  }
}
/**
 * Get a listing of the given directory using
 * {@link FileUtil#listFiles(File)}, instrumented as a LIST metadata
 * operation.
 *
 * @param volume target volume. null if unavailable.
 * @param dir Directory to be listed.
 * @return array of file objects representing the directory entries.
 * @throws IOException if the listing fails.
 */
public File[] listFiles(
    @Nullable FsVolumeSpi volume, File dir) throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, LIST);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, LIST);
    final File[] entries = FileUtil.listFiles(dir);
    profilingEventHook.afterMetadataOp(volume, LIST, startTime);
    return entries;
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * Get a listing of the given directory using
 * {@link FileUtil#list(File)}.
 *
 * <p>Instrumented as a LIST metadata operation; the profiling and
 * fault-injection hooks run around the actual listing.
 *
 * @param volume target volume. null if unavailable.
 * @param dir directory to be listed.
 * @return array of strings representing the directory entries.
 * @throws IOException if the listing fails.
 */
public String[] list(
    @Nullable FsVolumeSpi volume, File dir) throws IOException {
  final long begin = profilingEventHook.beforeMetadataOp(volume, LIST);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, LIST);
    String[] children = FileUtil.list(dir);
    profilingEventHook.afterMetadataOp(volume, LIST, begin);
    return children;
  } catch(Exception e) {
    // Report the failure (may trigger an async disk check) and rethrow.
    onFailure(volume, begin);
    throw e;
  }
}
/**
 * Get a filtered listing of the given directory using
 * {@link IOUtils#listDirectory(File, FilenameFilter)}, instrumented
 * as a LIST metadata operation.
 *
 * @param volume target volume. null if unavailable.
 * @param dir Directory to list.
 * @param filter {@link FilenameFilter} to filter the directory entries.
 * @throws IOException if the listing fails.
 */
public List<String> listDirectory(
    @Nullable FsVolumeSpi volume, File dir,
    FilenameFilter filter) throws IOException {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, LIST);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, LIST);
    final List<String> entries = IOUtils.listDirectory(dir, filter);
    profilingEventHook.afterMetadataOp(volume, LIST, startTime);
    return entries;
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * Retrieves the number of links to the specified file.
 *
 * @param volume target volume. null if unavailable.
 * @param f file whose link count is being queried.
 * @return number of hard-links to the given file, including the
 * given path itself.
 * @throws IOException
 */
public int getHardLinkCount(
    @Nullable FsVolumeSpi volume, File f) throws IOException {
  // NOTE(review): the operation is reported to the hooks as LIST —
  // presumably the closest available metadata-op category; confirm
  // against the op-type enum before changing.
  final long begin = profilingEventHook.beforeMetadataOp(volume, LIST);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, LIST);
    int count = HardLink.getLinkCount(f);
    profilingEventHook.afterMetadataOp(volume, LIST, begin);
    return count;
  } catch(Exception e) {
    // Report the failure and rethrow unchanged.
    onFailure(volume, begin);
    throw e;
  }
}
/**
 * Check for file existence using {@link File#exists()}, instrumented
 * as an EXISTS metadata operation.
 *
 * @param volume target volume. null if unavailable.
 * @param f file object.
 * @return true if the file exists.
 */
public boolean exists(@Nullable FsVolumeSpi volume, File f) {
  final long startTime = profilingEventHook.beforeMetadataOp(volume, EXISTS);
  try {
    faultInjectorEventHook.beforeMetadataOp(volume, EXISTS);
    final boolean found = f.exists();
    profilingEventHook.afterMetadataOp(volume, EXISTS, startTime);
    return found;
  } catch (Exception e) {
    onFailure(volume, startTime);
    throw e;
  }
}
/**
 * A thin wrapper over {@link FileInputStream} that allows
 * instrumenting disk IO.
 *
 * <p>Each read is bracketed by the fault-injection hook (before) and
 * the profiling hook (before/after); any exception is routed through
 * {@code onFailure} before being rethrown.
 */
private final class WrappedFileInputStream extends FileInputStream {
  // Volume the file resides on; null when unknown/unavailable.
  private @Nullable final FsVolumeSpi volume;
  /**
   * {@inheritDoc}.
   */
  private WrappedFileInputStream(@Nullable FsVolumeSpi volume, File f)
      throws FileNotFoundException {
    super(f);
    this.volume = volume;
  }
  /**
   * {@inheritDoc}.
   */
  private WrappedFileInputStream(
      @Nullable FsVolumeSpi volume, FileDescriptor fd) {
    super(fd);
    this.volume = volume;
  }
  /**
   * {@inheritDoc}.
   */
  @Override
  public int read() throws IOException {
    // Single-byte read is accounted as LEN_INT bytes to the hooks.
    final long begin = profilingEventHook.beforeFileIo(volume, READ, LEN_INT);
    try {
      faultInjectorEventHook.beforeFileIo(volume, READ, LEN_INT);
      int b = super.read();
      profilingEventHook.afterFileIo(volume, READ, begin, LEN_INT);
      return b;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * {@inheritDoc}.
   */
  @Override
  public int read(@Nonnull byte[] b) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, READ, b
        .length);
    try {
      faultInjectorEventHook.beforeFileIo(volume, READ, b.length);
      int numBytesRead = super.read(b);
      // NOTE(review): at EOF numBytesRead is -1 and is passed to
      // afterFileIo as-is — presumably the hook tolerates it; confirm.
      profilingEventHook.afterFileIo(volume, READ, begin, numBytesRead);
      return numBytesRead;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * {@inheritDoc}.
   */
  @Override
  public int read(@Nonnull byte[] b, int off, int len) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, READ, len);
    try {
      faultInjectorEventHook.beforeFileIo(volume, READ, len);
      int numBytesRead = super.read(b, off, len);
      profilingEventHook.afterFileIo(volume, READ, begin, numBytesRead);
      return numBytesRead;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
}
/**
 * A thin wrapper over {@link FileOutputStream} that allows
 * instrumenting disk IO.
 *
 * <p>Each write is bracketed by the fault-injection hook (before) and
 * the profiling hook (before/after); any exception is routed through
 * {@code onFailure} before being rethrown.
 */
private final class WrappedFileOutputStream extends FileOutputStream {
  // Volume the file resides on; null when unknown/unavailable.
  private @Nullable final FsVolumeSpi volume;
  /**
   * {@inheritDoc}.
   */
  private WrappedFileOutputStream(
      @Nullable FsVolumeSpi volume, File f,
      boolean append) throws FileNotFoundException {
    super(f, append);
    this.volume = volume;
  }
  /**
   * {@inheritDoc}.
   */
  private WrappedFileOutputStream(
      @Nullable FsVolumeSpi volume, FileDescriptor fd) {
    super(fd);
    this.volume = volume;
  }
  /**
   * {@inheritDoc}.
   */
  @Override
  public void write(int b) throws IOException {
    // Single-byte write is accounted as LEN_INT bytes to the hooks.
    final long begin = profilingEventHook.beforeFileIo(volume, WRITE,
        LEN_INT);
    try {
      faultInjectorEventHook.beforeFileIo(volume, WRITE, LEN_INT);
      super.write(b);
      profilingEventHook.afterFileIo(volume, WRITE, begin, LEN_INT);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * {@inheritDoc}.
   */
  @Override
  public void write(@Nonnull byte[] b) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, WRITE, b
        .length);
    try {
      faultInjectorEventHook.beforeFileIo(volume, WRITE, b.length);
      super.write(b);
      profilingEventHook.afterFileIo(volume, WRITE, begin, b.length);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  /**
   * {@inheritDoc}.
   */
  @Override
  public void write(@Nonnull byte[] b, int off, int len) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, WRITE, len);
    try {
      faultInjectorEventHook.beforeFileIo(volume, WRITE, len);
      super.write(b, off, len);
      profilingEventHook.afterFileIo(volume, WRITE, begin, len);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
}
/**
 * A thin wrapper over {@link RandomAccessFile} that allows
 * instrumenting IO.
 *
 * <p>(The previous javadoc incorrectly said {@code FileInputStream};
 * this class extends {@code RandomAccessFile}.) Each read/write is
 * bracketed by the fault-injection hook (before) and the profiling
 * hook (before/after); exceptions are routed through
 * {@code onFailure} before being rethrown.
 */
private final class WrappedRandomAccessFile extends RandomAccessFile {
  // Volume the file resides on; null when unknown/unavailable.
  private @Nullable final FsVolumeSpi volume;
  public WrappedRandomAccessFile(
      @Nullable FsVolumeSpi volume, File f, String mode)
      throws FileNotFoundException {
    super(f, mode);
    this.volume = volume;
  }
  @Override
  public int read() throws IOException {
    // Single-byte read is accounted as LEN_INT bytes to the hooks.
    final long begin = profilingEventHook.beforeFileIo(volume, READ, LEN_INT);
    try {
      faultInjectorEventHook.beforeFileIo(volume, READ, LEN_INT);
      int b = super.read();
      profilingEventHook.afterFileIo(volume, READ, begin, LEN_INT);
      return b;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, READ, len);
    try {
      faultInjectorEventHook.beforeFileIo(volume, READ, len);
      int numBytesRead = super.read(b, off, len);
      // NOTE(review): numBytesRead may be -1 at EOF and is passed to
      // afterFileIo as-is — confirm the hook tolerates it.
      profilingEventHook.afterFileIo(volume, READ, begin, numBytesRead);
      return numBytesRead;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  @Override
  public int read(byte[] b) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, READ, b
        .length);
    try {
      faultInjectorEventHook.beforeFileIo(volume, READ, b.length);
      int numBytesRead = super.read(b);
      profilingEventHook.afterFileIo(volume, READ, begin, numBytesRead);
      return numBytesRead;
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  @Override
  public void write(int b) throws IOException {
    // Single-byte write is accounted as LEN_INT bytes to the hooks.
    final long begin = profilingEventHook.beforeFileIo(volume, WRITE,
        LEN_INT);
    try {
      faultInjectorEventHook.beforeFileIo(volume, WRITE, LEN_INT);
      super.write(b);
      profilingEventHook.afterFileIo(volume, WRITE, begin, LEN_INT);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  @Override
  public void write(@Nonnull byte[] b) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, WRITE, b
        .length);
    try {
      faultInjectorEventHook.beforeFileIo(volume, WRITE, b.length);
      super.write(b);
      profilingEventHook.afterFileIo(volume, WRITE, begin, b.length);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    final long begin = profilingEventHook.beforeFileIo(volume, WRITE, len);
    try {
      faultInjectorEventHook.beforeFileIo(volume, WRITE, len);
      super.write(b, off, len);
      profilingEventHook.afterFileIo(volume, WRITE, begin, len);
    } catch(Exception e) {
      onFailure(volume, begin);
      throw e;
    }
  }
}
/**
 * Common failure path for all instrumented operations: optionally
 * kicks off an asynchronous disk check on the volume, then notifies
 * the profiling hook of the failure.
 *
 * @param volume volume the failed operation targeted; may be null.
 * @param begin start timestamp previously returned by the profiling
 *              hook's before* call.
 */
private void onFailure(@Nullable FsVolumeSpi volume, long begin) {
  // Only schedule a disk check when both the datanode and a concrete
  // volume are known.
  if (datanode != null && volume != null) {
    datanode.checkDiskErrorAsync(volume);
  }
  profilingEventHook.onFailure(volume, begin);
}
/**
 * Expose the profiling hook used to instrument IO operations.
 *
 * @return the profiling event hook instance.
 */
public ProfilingFileIoEvents getProfilingEventHook() {
  return profilingEventHook;
}
}
|
googleapis/google-cloud-java | 35,318 | java-gkehub/proto-google-cloud-gkehub-v1beta1/src/main/java/com/google/cloud/gkehub/v1beta1/DeleteMembershipRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1beta1/membership.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1beta1;
/**
*
*
* <pre>
* Request message for `GkeHubMembershipService.DeleteMembership` method.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta1.DeleteMembershipRequest}
*/
public final class DeleteMembershipRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1beta1.DeleteMembershipRequest)
DeleteMembershipRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteMembershipRequest.newBuilder() to construct.
private DeleteMembershipRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteMembershipRequest() {
name_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteMembershipRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_DeleteMembershipRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_DeleteMembershipRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.class,
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FORCE_FIELD_NUMBER = 5;
private boolean force_ = false;
/**
*
*
* <pre>
* Optional. If set to true, any subresource from this Membership will also be
* deleted. Otherwise, the request will only work if the Membership has no
* subresource.
* </pre>
*
* <code>bool force = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
@java.lang.Override
public boolean getForce() {
return force_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: fields at their default value are not serialized.
  // Field numbers: name = 1, request_id = 4, force = 5.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, requestId_);
  }
  if (force_ != false) {
    output.writeBool(5, force_);
  }
  // Preserve any fields this binary did not know about when parsing.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 marks "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Mirror writeTo(): only non-default fields contribute bytes.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, requestId_);
  }
  if (force_ != false) {
    size += com.google.protobuf.CodedOutputStream.computeBoolSize(5, force_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Non-message (or foreign-type) comparisons defer to the superclass.
  if (!(obj instanceof com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest other =
      (com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest) obj;
  // Field-by-field comparison: name, request_id, force, plus unknown fields.
  if (!getName().equals(other.getName())) return false;
  if (!getRequestId().equals(other.getRequestId())) return false;
  if (getForce() != other.getForce()) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 marks "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard generated-code mixing: fold in the descriptor, then each
  // field number/value pair, then the unknown fields.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
  hash = (53 * hash) + getRequestId().hashCode();
  hash = (37 * hash) + FORCE_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce());
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for `GkeHubMembershipService.DeleteMembership` method.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta1.DeleteMembershipRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1beta1.DeleteMembershipRequest)
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_DeleteMembershipRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_DeleteMembershipRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.class,
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.Builder.class);
}
// Construct using com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
requestId_ = "";
force_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.v1beta1.MembershipOuterClass
.internal_static_google_cloud_gkehub_v1beta1_DeleteMembershipRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest getDefaultInstanceForType() {
return com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest build() {
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest buildPartial() {
com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest result =
new com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose presence bits are set in the builder
// into the partially-built message (bit 0 = name, bit 1 = request_id,
// bit 2 = force).
private void buildPartial0(com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.name_ = name_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.requestId_ = requestId_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.force_ = force_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest) {
return mergeFrom((com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another DeleteMembershipRequest into this builder: non-empty
// strings and non-default booleans in `other` overwrite this builder's
// values; unknown fields are merged as well.
public Builder mergeFrom(com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest other) {
  // Merging the default instance is a no-op.
  if (other == com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest.getDefaultInstance())
    return this;
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getRequestId().isEmpty()) {
    requestId_ = other.requestId_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.getForce() != false) {
    setForce(other.getForce());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    // Tag-dispatch loop over the wire stream. Tag = (field_number << 3)
    // | wire_type: 10 -> name (field 1, length-delimited), 34 ->
    // request_id (field 4, length-delimited), 40 -> force (field 5,
    // varint). Tag 0 means end of stream.
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            name_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 34:
          {
            requestId_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 34
        case 40:
          {
            force_ = input.readBool();
            bitField0_ |= 0x00000004;
            break;
          } // case 40
        default:
          {
            // Unrecognized tags go into the unknown-field set.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on partial parse.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
  // The backing field caches either a String or a ByteString; decode lazily
  // and memoize the String so subsequent calls are free.
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  name_ = decoded;
  return decoded;
}
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
  // Mirror image of getName(): encode a cached String to a ByteString once
  // and memoize the encoded form.
  java.lang.Object ref = name_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  name_ = encoded;
  return encoded;
}
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
// Sets the membership resource name; rejects null, records the field's
// presence bit, and notifies parent builders of the change.
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
// Resets name to its default ("") and clears its presence bit.
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Membership resource name in the format
* `projects/*/locations/*/memberships/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
// Byte-level setter: verifies the bytes are valid UTF-8 before caching the
// raw ByteString (decoded lazily by getName()).
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
// Lazily decodes a cached ByteString to a String and memoizes the result.
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
// Lazily encodes a cached String to a ByteString and memoizes the result.
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
// Sets the idempotency request ID; rejects null, records the presence bit,
// and notifies parent builders.
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
// Resets request_id to its default ("") and clears its presence bit.
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A request ID to identify requests. Specify a unique request ID
* so that if you must retry your request, the server will know to ignore
* the request if it has already been completed. The server will guarantee
* that for at least 60 minutes after the first request.
*
* For example, consider a situation where you make an initial request and
* the request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
// Byte-level setter: verifies valid UTF-8 before caching the raw ByteString
// (decoded lazily by getRequestId()).
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private boolean force_;
/**
*
*
* <pre>
* Optional. If set to true, any subresource from this Membership will also be
* deleted. Otherwise, the request will only work if the Membership has no
* subresource.
* </pre>
*
* <code>bool force = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The force.
*/
// Returns whether subresources should also be deleted with the Membership.
@java.lang.Override
public boolean getForce() {
return force_;
}
/**
*
*
* <pre>
* Optional. If set to true, any subresource from this Membership will also be
* deleted. Otherwise, the request will only work if the Membership has no
* subresource.
* </pre>
*
* <code>bool force = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The force to set.
* @return This builder for chaining.
*/
// Sets the force flag, records its presence bit, and notifies listeners.
public Builder setForce(boolean value) {
force_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. If set to true, any subresource from this Membership will also be
* deleted. Otherwise, the request will only work if the Membership has no
* subresource.
* </pre>
*
* <code>bool force = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
// Resets force to its default (false) and clears its presence bit.
public Builder clearForce() {
bitField0_ = (bitField0_ & ~0x00000004);
force_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1beta1.DeleteMembershipRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1beta1.DeleteMembershipRequest)
// Shared singleton default (empty) instance returned by getDefaultInstance().
private static final com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest();
}
public static com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser implemented on top of Builder.mergeFrom; parse failures attach the
// partially built message so callers can inspect what was read so far.
private static final com.google.protobuf.Parser<DeleteMembershipRequest> PARSER =
new com.google.protobuf.AbstractParser<DeleteMembershipRequest>() {
@java.lang.Override
public DeleteMembershipRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protocol-buffer exception.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DeleteMembershipRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteMembershipRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta1.DeleteMembershipRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---- NOTE(review): a dataset-concatenation artifact (repo/path metadata)
// was fused into this line. A second generated file, ListHooksResponse.java
// from googleapis/google-cloud-java (java-securesourcemanager, package
// com.google.cloud.securesourcemanager.v1), begins below; these two
// generated classes belong in separate files.
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* ListHooksResponse is response to list hooks.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.ListHooksResponse}
*/
public final class ListHooksResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.ListHooksResponse)
ListHooksResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListHooksResponse.newBuilder() to construct.
private ListHooksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance state: empty hooks list and empty next_page_token.
private ListHooksResponse() {
hooks_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
// Invoked reflectively by the protobuf runtime to create fresh instances.
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListHooksResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListHooksResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListHooksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.ListHooksResponse.class,
com.google.cloud.securesourcemanager.v1.ListHooksResponse.Builder.class);
}
public static final int HOOKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.securesourcemanager.v1.Hook> hooks_;
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.securesourcemanager.v1.Hook> getHooksList() {
return hooks_;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.securesourcemanager.v1.HookOrBuilder>
getHooksOrBuilderList() {
return hooks_;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
@java.lang.Override
public int getHooksCount() {
return hooks_.size();
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.Hook getHooks(int index) {
return hooks_.get(index);
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.HookOrBuilder getHooksOrBuilder(int index) {
return hooks_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 1 = initialized,
// 0 = not initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
// Proto3 has no required fields, so the answer is always true once cached.
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Serializes hooks (field 1), then next_page_token (field 2, only when
// non-empty per proto3 default-value elision), then any unknown fields.
for (int i = 0; i < hooks_.size(); i++) {
output.writeMessage(1, hooks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize of -1 means "not yet computed"; the result is cached
// because a built message is immutable.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < hooks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, hooks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
// Non-ListHooksResponse messages fall back to the reflective comparison.
if (!(obj instanceof com.google.cloud.securesourcemanager.v1.ListHooksResponse)) {
return super.equals(obj);
}
com.google.cloud.securesourcemanager.v1.ListHooksResponse other =
(com.google.cloud.securesourcemanager.v1.ListHooksResponse) obj;
// Field-by-field value equality, including unknown fields.
if (!getHooksList().equals(other.getHooksList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// 0 doubles as the "not yet computed" sentinel for the memoized hash.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// The repeated field contributes only when non-empty, so empty messages
// hash identically whether or not the list was ever touched.
if (getHooksCount() > 0) {
hash = (37 * hash) + HOOKS_FIELD_NUMBER;
hash = (53 * hash) + getHooksList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.securesourcemanager.v1.ListHooksResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* ListHooksResponse is response to list hooks.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.ListHooksResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.ListHooksResponse)
com.google.cloud.securesourcemanager.v1.ListHooksResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListHooksResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListHooksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.ListHooksResponse.class,
com.google.cloud.securesourcemanager.v1.ListHooksResponse.Builder.class);
}
// Construct using com.google.cloud.securesourcemanager.v1.ListHooksResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
// When a nested field builder exists it owns the list; otherwise drop the
// local copy and fall back to the shared empty list.
if (hooksBuilder_ == null) {
hooks_ = java.util.Collections.emptyList();
} else {
hooks_ = null;
hooksBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_ListHooksResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListHooksResponse getDefaultInstanceForType() {
return com.google.cloud.securesourcemanager.v1.ListHooksResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListHooksResponse build() {
com.google.cloud.securesourcemanager.v1.ListHooksResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListHooksResponse buildPartial() {
com.google.cloud.securesourcemanager.v1.ListHooksResponse result =
new com.google.cloud.securesourcemanager.v1.ListHooksResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated hooks field: freezes the local list (clearing the
// ownership bit so later builder mutations copy-on-write) or builds from
// the nested field builder when one exists.
private void buildPartialRepeatedFields(
com.google.cloud.securesourcemanager.v1.ListHooksResponse result) {
if (hooksBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
hooks_ = java.util.Collections.unmodifiableList(hooks_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.hooks_ = hooks_;
} else {
result.hooks_ = hooksBuilder_.build();
}
}
// Copies singular fields whose presence bits are set.
private void buildPartial0(com.google.cloud.securesourcemanager.v1.ListHooksResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securesourcemanager.v1.ListHooksResponse) {
return mergeFrom((com.google.cloud.securesourcemanager.v1.ListHooksResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges {@code other} into this builder. The repeated hooks field is
// appended (not replaced); next_page_token is copied only when non-empty,
// per proto3 merge semantics.
public Builder mergeFrom(com.google.cloud.securesourcemanager.v1.ListHooksResponse other) {
if (other == com.google.cloud.securesourcemanager.v1.ListHooksResponse.getDefaultInstance())
return this;
if (hooksBuilder_ == null) {
// Plain-list mode: adopt other's (immutable) list when ours is empty,
// otherwise copy-on-write and append.
if (!other.hooks_.isEmpty()) {
if (hooks_.isEmpty()) {
hooks_ = other.hooks_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureHooksIsMutable();
hooks_.addAll(other.hooks_);
}
onChanged();
}
} else {
// Field-builder mode: route messages through the RepeatedFieldBuilder.
if (!other.hooks_.isEmpty()) {
if (hooksBuilder_.isEmpty()) {
hooksBuilder_.dispose();
hooksBuilder_ = null;
hooks_ = other.hooks_;
bitField0_ = (bitField0_ & ~0x00000001);
hooksBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getHooksFieldBuilder()
: null;
} else {
hooksBuilder_.addAllMessages(other.hooks_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop. Tag 10 = hooks (field 1, embedded message),
// tag 18 = next_page_token (field 2, length-delimited string); unknown
// tags are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of stream / end of message.
done = true;
break;
case 10:
{
com.google.cloud.securesourcemanager.v1.Hook m =
input.readMessage(
com.google.cloud.securesourcemanager.v1.Hook.parser(), extensionRegistry);
if (hooksBuilder_ == null) {
ensureHooksIsMutable();
hooks_.add(m);
} else {
hooksBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Always notify parent builders, even after a partial parse.
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.securesourcemanager.v1.Hook> hooks_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x1 of bitField0_ records that this builder owns
// a private, mutable copy of hooks_. Make one before the first mutation.
private void ensureHooksIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
hooks_ = new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.Hook>(hooks_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.Hook,
com.google.cloud.securesourcemanager.v1.Hook.Builder,
com.google.cloud.securesourcemanager.v1.HookOrBuilder>
hooksBuilder_;
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.Hook> getHooksList() {
if (hooksBuilder_ == null) {
return java.util.Collections.unmodifiableList(hooks_);
} else {
return hooksBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public int getHooksCount() {
if (hooksBuilder_ == null) {
return hooks_.size();
} else {
return hooksBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Hook getHooks(int index) {
if (hooksBuilder_ == null) {
return hooks_.get(index);
} else {
return hooksBuilder_.getMessage(index);
}
}
// Mutators for the repeated `hooks` field. All follow the same pattern: null-check the
// value, ensure the local list is privately owned (copy-on-write), mutate, and call
// onChanged() to invalidate any parent message caches — or delegate to hooksBuilder_.
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder setHooks(int index, com.google.cloud.securesourcemanager.v1.Hook value) {
if (hooksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureHooksIsMutable();
hooks_.set(index, value);
onChanged();
} else {
hooksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder setHooks(
int index, com.google.cloud.securesourcemanager.v1.Hook.Builder builderForValue) {
if (hooksBuilder_ == null) {
ensureHooksIsMutable();
hooks_.set(index, builderForValue.build());
onChanged();
} else {
hooksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder addHooks(com.google.cloud.securesourcemanager.v1.Hook value) {
if (hooksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureHooksIsMutable();
hooks_.add(value);
onChanged();
} else {
hooksBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder addHooks(int index, com.google.cloud.securesourcemanager.v1.Hook value) {
if (hooksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureHooksIsMutable();
hooks_.add(index, value);
onChanged();
} else {
hooksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder addHooks(com.google.cloud.securesourcemanager.v1.Hook.Builder builderForValue) {
if (hooksBuilder_ == null) {
ensureHooksIsMutable();
hooks_.add(builderForValue.build());
onChanged();
} else {
hooksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder addHooks(
int index, com.google.cloud.securesourcemanager.v1.Hook.Builder builderForValue) {
if (hooksBuilder_ == null) {
ensureHooksIsMutable();
hooks_.add(index, builderForValue.build());
onChanged();
} else {
hooksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
// Bulk add / clear / remove for the repeated `hooks` field, same
// plain-list-vs-field-builder split as the single-element mutators.
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder addAllHooks(
java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.Hook> values) {
if (hooksBuilder_ == null) {
ensureHooksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, hooks_);
onChanged();
} else {
hooksBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder clearHooks() {
if (hooksBuilder_ == null) {
// Drop back to the shared empty list and clear the ownership bit.
hooks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
hooksBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public Builder removeHooks(int index) {
if (hooksBuilder_ == null) {
ensureHooksIsMutable();
hooks_.remove(index);
onChanged();
} else {
hooksBuilder_.remove(index);
}
return this;
}
// Nested-builder accessors for the repeated `hooks` field. Calling any of these
// forces creation of hooksBuilder_ (see getHooksFieldBuilder()), after which the
// field builder — not hooks_ — is the source of truth.
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Hook.Builder getHooksBuilder(int index) {
return getHooksFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.HookOrBuilder getHooksOrBuilder(int index) {
if (hooksBuilder_ == null) {
return hooks_.get(index);
} else {
return hooksBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public java.util.List<? extends com.google.cloud.securesourcemanager.v1.HookOrBuilder>
getHooksOrBuilderList() {
if (hooksBuilder_ != null) {
return hooksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(hooks_);
}
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Hook.Builder addHooksBuilder() {
return getHooksFieldBuilder()
.addBuilder(com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.Hook.Builder addHooksBuilder(int index) {
return getHooksFieldBuilder()
.addBuilder(index, com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of hooks.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.Hook hooks = 1;</code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.Hook.Builder>
getHooksBuilderList() {
return getHooksFieldBuilder().getBuilderList();
}
// Lazily constructs the RepeatedFieldBuilderV3, seeding it from hooks_ and the
// current ownership bit; afterwards hooks_ is nulled so only the builder holds state.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.Hook,
com.google.cloud.securesourcemanager.v1.Hook.Builder,
com.google.cloud.securesourcemanager.v1.HookOrBuilder>
getHooksFieldBuilder() {
if (hooksBuilder_ == null) {
hooksBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.Hook,
com.google.cloud.securesourcemanager.v1.Hook.Builder,
com.google.cloud.securesourcemanager.v1.HookOrBuilder>(
hooks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
hooks_ = null;
}
return hooksBuilder_;
}
// Storage for `next_page_token`: either a String or a ByteString, converted lazily
// in whichever direction the caller asks for (standard protobuf string caching).
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
// Decode once and cache the String form in place of the ByteString.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
// Encode once and cache the ByteString form in place of the String.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Final pass-through overrides: unknown-field handling is delegated entirely to the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.ListHooksResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.ListHooksResponse)
// Singleton default instance (all fields at their proto3 defaults), created eagerly
// at class-initialization time.
private static final com.google.cloud.securesourcemanager.v1.ListHooksResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.ListHooksResponse();
}
public static com.google.cloud.securesourcemanager.v1.ListHooksResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser: parses via a fresh Builder and, on any failure, attaches the
// partially-parsed message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<ListHooksResponse> PARSER =
new com.google.protobuf.AbstractParser<ListHooksResponse>() {
@java.lang.Override
public ListHooksResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListHooksResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListHooksResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.ListHooksResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,290 | java-deploy/proto-google-cloud-deploy-v1/src/main/java/com/google/cloud/deploy/v1/DeployPolicyResourceSelector.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/deploy/v1/cloud_deploy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.deploy.v1;
/**
*
*
* <pre>
* Contains information on the resources to select for a deploy policy.
* Attributes provided must all match the resource in order for policy
* restrictions to apply. For example, if delivery pipelines attributes given
* are an id "prod" and labels "foo: bar", a delivery pipeline resource must
* match both that id and have that label in order to be subject to the policy.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.DeployPolicyResourceSelector}
*/
public final class DeployPolicyResourceSelector extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.DeployPolicyResourceSelector)
DeployPolicyResourceSelectorOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeployPolicyResourceSelector.newBuilder() to construct.
private DeployPolicyResourceSelector(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used only for the default instance; both message fields stay null.
private DeployPolicyResourceSelector() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeployPolicyResourceSelector();
}
// Reflection plumbing: links this class to its proto descriptor and field accessors
// registered in CloudDeployProto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_DeployPolicyResourceSelector_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_DeployPolicyResourceSelector_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.deploy.v1.DeployPolicyResourceSelector.class,
com.google.cloud.deploy.v1.DeployPolicyResourceSelector.Builder.class);
}
// Message-side storage and read accessors for the optional `delivery_pipeline` field.
// bitField0_ bit 0x1 records explicit presence; the getter substitutes the default
// instance when unset, so it never returns null.
private int bitField0_;
public static final int DELIVERY_PIPELINE_FIELD_NUMBER = 1;
private com.google.cloud.deploy.v1.DeliveryPipelineAttribute deliveryPipeline_;
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the deliveryPipeline field is set.
*/
@java.lang.Override
public boolean hasDeliveryPipeline() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The deliveryPipeline.
*/
@java.lang.Override
public com.google.cloud.deploy.v1.DeliveryPipelineAttribute getDeliveryPipeline() {
return deliveryPipeline_ == null
? com.google.cloud.deploy.v1.DeliveryPipelineAttribute.getDefaultInstance()
: deliveryPipeline_;
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.cloud.deploy.v1.DeliveryPipelineAttributeOrBuilder
getDeliveryPipelineOrBuilder() {
return deliveryPipeline_ == null
? com.google.cloud.deploy.v1.DeliveryPipelineAttribute.getDefaultInstance()
: deliveryPipeline_;
}
// Message-side storage and read accessors for the optional `target` field
// (presence bit 0x2); mirrors the delivery_pipeline accessors above.
public static final int TARGET_FIELD_NUMBER = 2;
private com.google.cloud.deploy.v1.TargetAttribute target_;
/**
*
*
* <pre>
* Optional. Contains attributes about a target.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.TargetAttribute target = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the target field is set.
*/
@java.lang.Override
public boolean hasTarget() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. Contains attributes about a target.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.TargetAttribute target = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The target.
*/
@java.lang.Override
public com.google.cloud.deploy.v1.TargetAttribute getTarget() {
return target_ == null
? com.google.cloud.deploy.v1.TargetAttribute.getDefaultInstance()
: target_;
}
/**
*
*
* <pre>
* Optional. Contains attributes about a target.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.TargetAttribute target = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.cloud.deploy.v1.TargetAttributeOrBuilder getTargetOrBuilder() {
return target_ == null
? com.google.cloud.deploy.v1.TargetAttribute.getDefaultInstance()
: target_;
}
// Serialization support: memoized initialization check, wire output, and cached size.
// -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Only explicitly-set optional fields are written to the wire.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getDeliveryPipeline());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTarget());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDeliveryPipeline());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTarget());
}
size += getUnknownFields().getSerializedSize();
// Cache for subsequent calls; the message is immutable so this is safe.
memoizedSize = size;
return size;
}
// Value equality and memoized hashing over both optional fields plus unknown fields,
// comparing presence before contents.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.deploy.v1.DeployPolicyResourceSelector)) {
return super.equals(obj);
}
com.google.cloud.deploy.v1.DeployPolicyResourceSelector other =
(com.google.cloud.deploy.v1.DeployPolicyResourceSelector) obj;
if (hasDeliveryPipeline() != other.hasDeliveryPipeline()) return false;
if (hasDeliveryPipeline()) {
if (!getDeliveryPipeline().equals(other.getDeliveryPipeline())) return false;
}
if (hasTarget() != other.hasTarget()) return false;
if (hasTarget()) {
if (!getTarget().equals(other.getTarget())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasDeliveryPipeline()) {
hash = (37 * hash) + DELIVERY_PIPELINE_FIELD_NUMBER;
hash = (53 * hash) + getDeliveryPipeline().hashCode();
}
if (hasTarget()) {
hash = (37 * hash) + TARGET_FIELD_NUMBER;
hash = (53 * hash) + getTarget().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard static parse entry points for every supported input type
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
// without an extension registry; all delegate to PARSER or the GeneratedMessageV3 helpers.
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message bytes.
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() on the default instance returns a fresh
// builder rather than copying, avoiding a needless merge.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.deploy.v1.DeployPolicyResourceSelector prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Contains information on the resources to select for a deploy policy.
* Attributes provided must all match the resource in order for policy
* restrictions to apply. For example, if delivery pipelines attributes given
* are an id "prod" and labels "foo: bar", a delivery pipeline resource must
* match both that id and have that label in order to be subject to the policy.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.DeployPolicyResourceSelector}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.DeployPolicyResourceSelector)
com.google.cloud.deploy.v1.DeployPolicyResourceSelectorOrBuilder {
// Builder descriptor plumbing and constructors. When alwaysUseFieldBuilders is on
// (i.e. the builder has a parent), nested field builders are created eagerly so
// parent invalidation works.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_DeployPolicyResourceSelector_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_DeployPolicyResourceSelector_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.deploy.v1.DeployPolicyResourceSelector.class,
com.google.cloud.deploy.v1.DeployPolicyResourceSelector.Builder.class);
}
// Construct using com.google.cloud.deploy.v1.DeployPolicyResourceSelector.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDeliveryPipelineFieldBuilder();
getTargetFieldBuilder();
}
}
// Resets the builder to defaults: clears presence bits, drops both message fields,
// and disposes any nested field builders.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
deliveryPipeline_ = null;
if (deliveryPipelineBuilder_ != null) {
deliveryPipelineBuilder_.dispose();
deliveryPipelineBuilder_ = null;
}
target_ = null;
if (targetBuilder_ != null) {
targetBuilder_.dispose();
targetBuilder_ = null;
}
return this;
}
// Build pipeline: build() enforces initialization; buildPartial() snapshots the
// builder into an immutable message, copying only fields whose presence bit is set.
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.deploy.v1.CloudDeployProto
.internal_static_google_cloud_deploy_v1_DeployPolicyResourceSelector_descriptor;
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeployPolicyResourceSelector getDefaultInstanceForType() {
return com.google.cloud.deploy.v1.DeployPolicyResourceSelector.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeployPolicyResourceSelector build() {
com.google.cloud.deploy.v1.DeployPolicyResourceSelector result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeployPolicyResourceSelector buildPartial() {
com.google.cloud.deploy.v1.DeployPolicyResourceSelector result =
new com.google.cloud.deploy.v1.DeployPolicyResourceSelector(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.deploy.v1.DeployPolicyResourceSelector result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
// Take the value from the field builder when one exists, else the raw field.
result.deliveryPipeline_ =
deliveryPipelineBuilder_ == null ? deliveryPipeline_ : deliveryPipelineBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.target_ = targetBuilder_ == null ? target_ : targetBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// Reflection-based mutator overrides; all delegate unchanged to the superclass and
// exist only to narrow the return type to this Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Message merge: typed dispatch, then field-by-field merge of whichever optional
// fields are present on the other message; unknown fields are merged last.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.deploy.v1.DeployPolicyResourceSelector) {
return mergeFrom((com.google.cloud.deploy.v1.DeployPolicyResourceSelector) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.deploy.v1.DeployPolicyResourceSelector other) {
if (other == com.google.cloud.deploy.v1.DeployPolicyResourceSelector.getDefaultInstance())
return this;
if (other.hasDeliveryPipeline()) {
mergeDeliveryPipeline(other.getDeliveryPipeline());
}
if (other.hasTarget()) {
mergeTarget(other.getTarget());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge loop: reads tags until EOF (tag 0) or an end-group tag, routing
// field 1 (tag 10) and field 2 (tag 18) into their nested field builders and
// everything else to unknown-field handling.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getDeliveryPipelineFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getTargetFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify the parent even on error so partially-read state is observed.
onChanged();
} // finally
return this;
}
// Builder-side state and full accessor suite for the singular message field
// `delivery_pipeline` (presence bit 0x1). Follows the standard GeneratedMessageV3
// pattern: a raw field plus a lazily-created SingleFieldBuilderV3 that, once
// created, owns the value (the raw field is then nulled).
private int bitField0_;
private com.google.cloud.deploy.v1.DeliveryPipelineAttribute deliveryPipeline_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.deploy.v1.DeliveryPipelineAttribute,
com.google.cloud.deploy.v1.DeliveryPipelineAttribute.Builder,
com.google.cloud.deploy.v1.DeliveryPipelineAttributeOrBuilder>
deliveryPipelineBuilder_;
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the deliveryPipeline field is set.
*/
public boolean hasDeliveryPipeline() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The deliveryPipeline.
*/
public com.google.cloud.deploy.v1.DeliveryPipelineAttribute getDeliveryPipeline() {
if (deliveryPipelineBuilder_ == null) {
return deliveryPipeline_ == null
? com.google.cloud.deploy.v1.DeliveryPipelineAttribute.getDefaultInstance()
: deliveryPipeline_;
} else {
return deliveryPipelineBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setDeliveryPipeline(com.google.cloud.deploy.v1.DeliveryPipelineAttribute value) {
if (deliveryPipelineBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
deliveryPipeline_ = value;
} else {
deliveryPipelineBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setDeliveryPipeline(
com.google.cloud.deploy.v1.DeliveryPipelineAttribute.Builder builderForValue) {
if (deliveryPipelineBuilder_ == null) {
deliveryPipeline_ = builderForValue.build();
} else {
deliveryPipelineBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeDeliveryPipeline(
com.google.cloud.deploy.v1.DeliveryPipelineAttribute value) {
if (deliveryPipelineBuilder_ == null) {
// Field-merge only when a non-default value is already present; otherwise replace.
if (((bitField0_ & 0x00000001) != 0)
&& deliveryPipeline_ != null
&& deliveryPipeline_
!= com.google.cloud.deploy.v1.DeliveryPipelineAttribute.getDefaultInstance()) {
getDeliveryPipelineBuilder().mergeFrom(value);
} else {
deliveryPipeline_ = value;
}
} else {
deliveryPipelineBuilder_.mergeFrom(value);
}
if (deliveryPipeline_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearDeliveryPipeline() {
bitField0_ = (bitField0_ & ~0x00000001);
deliveryPipeline_ = null;
if (deliveryPipelineBuilder_ != null) {
deliveryPipelineBuilder_.dispose();
deliveryPipelineBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.deploy.v1.DeliveryPipelineAttribute.Builder
getDeliveryPipelineBuilder() {
// Requesting the builder marks the field present and forces field-builder creation.
bitField0_ |= 0x00000001;
onChanged();
return getDeliveryPipelineFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.deploy.v1.DeliveryPipelineAttributeOrBuilder
getDeliveryPipelineOrBuilder() {
if (deliveryPipelineBuilder_ != null) {
return deliveryPipelineBuilder_.getMessageOrBuilder();
} else {
return deliveryPipeline_ == null
? com.google.cloud.deploy.v1.DeliveryPipelineAttribute.getDefaultInstance()
: deliveryPipeline_;
}
}
/**
*
*
* <pre>
* Optional. Contains attributes about a delivery pipeline.
* </pre>
*
* <code>
* .google.cloud.deploy.v1.DeliveryPipelineAttribute delivery_pipeline = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.deploy.v1.DeliveryPipelineAttribute,
com.google.cloud.deploy.v1.DeliveryPipelineAttribute.Builder,
com.google.cloud.deploy.v1.DeliveryPipelineAttributeOrBuilder>
getDeliveryPipelineFieldBuilder() {
if (deliveryPipelineBuilder_ == null) {
deliveryPipelineBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.deploy.v1.DeliveryPipelineAttribute,
com.google.cloud.deploy.v1.DeliveryPipelineAttribute.Builder,
com.google.cloud.deploy.v1.DeliveryPipelineAttributeOrBuilder>(
getDeliveryPipeline(), getParentForChildren(), isClean());
// The field builder now owns the value; release the direct reference.
deliveryPipeline_ = null;
}
return deliveryPipelineBuilder_;
}
// Backing state for the optional `target` field (field 2). Exactly one of
// `target_` or `targetBuilder_` holds the live value at any time.
private com.google.cloud.deploy.v1.TargetAttribute target_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.deploy.v1.TargetAttribute,
        com.google.cloud.deploy.v1.TargetAttribute.Builder,
        com.google.cloud.deploy.v1.TargetAttributeOrBuilder>
    targetBuilder_;
/**
 * Returns whether the optional {@code target} field (field 2) is set,
 * per its presence bit in {@code bitField0_}.
 */
public boolean hasTarget() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * Returns the {@code target} field, or the default instance when unset.
 */
public com.google.cloud.deploy.v1.TargetAttribute getTarget() {
  if (targetBuilder_ == null) {
    return target_ == null
        ? com.google.cloud.deploy.v1.TargetAttribute.getDefaultInstance()
        : target_;
  } else {
    return targetBuilder_.getMessage();
  }
}
/**
 * Sets the {@code target} field to {@code value} (must be non-null) and
 * marks it present.
 */
public Builder setTarget(com.google.cloud.deploy.v1.TargetAttribute value) {
  if (targetBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    target_ = value;
  } else {
    targetBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Sets the {@code target} field from a builder and marks it present.
 */
public Builder setTarget(com.google.cloud.deploy.v1.TargetAttribute.Builder builderForValue) {
  if (targetBuilder_ == null) {
    target_ = builderForValue.build();
  } else {
    targetBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Merges {@code value} into the {@code target} field. If the field already holds a
 * non-default message, the two are merged via the nested builder; otherwise
 * {@code value} replaces the current message.
 */
public Builder mergeTarget(com.google.cloud.deploy.v1.TargetAttribute value) {
  if (targetBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)
        && target_ != null
        && target_ != com.google.cloud.deploy.v1.TargetAttribute.getDefaultInstance()) {
      getTargetBuilder().mergeFrom(value);
    } else {
      target_ = value;
    }
  } else {
    targetBuilder_.mergeFrom(value);
  }
  // Only flag the field as set when a message is actually present after the merge.
  if (target_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 * Clears the {@code target} field, disposing any nested single-field builder.
 */
public Builder clearTarget() {
  bitField0_ = (bitField0_ & ~0x00000002);
  target_ = null;
  if (targetBuilder_ != null) {
    targetBuilder_.dispose();
    targetBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for the {@code target} field, creating the backing
 * single-field builder lazily. Marks the field as set.
 */
public com.google.cloud.deploy.v1.TargetAttribute.Builder getTargetBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getTargetFieldBuilder().getBuilder();
}
/**
 * Returns a read-only view of the {@code target} field, or the default instance
 * when unset.
 */
public com.google.cloud.deploy.v1.TargetAttributeOrBuilder getTargetOrBuilder() {
  if (targetBuilder_ != null) {
    return targetBuilder_.getMessageOrBuilder();
  } else {
    return target_ == null
        ? com.google.cloud.deploy.v1.TargetAttribute.getDefaultInstance()
        : target_;
  }
}
/**
 * Lazily constructs the {@link com.google.protobuf.SingleFieldBuilderV3} backing
 * the {@code target} field. Once it exists it owns the field state, so the plain
 * {@code target_} reference is nulled out.
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.deploy.v1.TargetAttribute,
        com.google.cloud.deploy.v1.TargetAttribute.Builder,
        com.google.cloud.deploy.v1.TargetAttributeOrBuilder>
    getTargetFieldBuilder() {
  if (targetBuilder_ == null) {
    targetBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.deploy.v1.TargetAttribute,
            com.google.cloud.deploy.v1.TargetAttribute.Builder,
            com.google.cloud.deploy.v1.TargetAttributeOrBuilder>(
            getTarget(), getParentForChildren(), isClean());
    target_ = null;
  }
  return targetBuilder_;
}
// Final overrides that pin unknown-field handling to the GeneratedMessageV3.Builder
// base implementation (required by the generated-code contract).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.DeployPolicyResourceSelector)
}
// @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.DeployPolicyResourceSelector)
// Singleton default instance shared by all callers; created eagerly at class load.
private static final com.google.cloud.deploy.v1.DeployPolicyResourceSelector DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.DeployPolicyResourceSelector();
}

/** Returns the immutable default (all-fields-unset) instance. */
public static com.google.cloud.deploy.v1.DeployPolicyResourceSelector getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire parser: delegates to Builder.mergeFrom and attaches the partially parsed
// message to any InvalidProtocolBufferException so callers can inspect it.
private static final com.google.protobuf.Parser<DeployPolicyResourceSelector> PARSER =
    new com.google.protobuf.AbstractParser<DeployPolicyResourceSelector>() {
      @java.lang.Override
      public DeployPolicyResourceSelector parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the parser API surfaces a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Returns the shared wire-format parser for this message type. */
public static com.google.protobuf.Parser<DeployPolicyResourceSelector> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<DeployPolicyResourceSelector> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.deploy.v1.DeployPolicyResourceSelector getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,259 | java-notebooks/proto-google-cloud-notebooks-v2/src/main/java/com/google/cloud/notebooks/v2/DataDisk.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v2/gce_setup.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v2;
/**
*
*
* <pre>
* An instance-attached disk resource.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v2.DataDisk}
*/
public final class DataDisk extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.notebooks.v2.DataDisk)
DataDiskOrBuilder {
private static final long serialVersionUID = 0L;
// Use DataDisk.newBuilder() to construct.
private DataDisk(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DataDisk() {
diskType_ = 0;
diskEncryption_ = 0;
kmsKey_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DataDisk();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v2.GceSetupProto
.internal_static_google_cloud_notebooks_v2_DataDisk_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v2.GceSetupProto
.internal_static_google_cloud_notebooks_v2_DataDisk_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v2.DataDisk.class,
com.google.cloud.notebooks.v2.DataDisk.Builder.class);
}
public static final int DISK_SIZE_GB_FIELD_NUMBER = 1;
private long diskSizeGb_ = 0L;
/**
 * Returns the optional disk size in GB (field 1), up to 64000 GB; per the proto
 * docs, the service treats an unset value (0 here) as 100.
 */
@java.lang.Override
public long getDiskSizeGb() {
  return diskSizeGb_;
}

public static final int DISK_TYPE_FIELD_NUMBER = 2;
private int diskType_ = 0;
/**
 * Returns the raw wire value of the optional, input-only {@code disk_type}
 * enum (field 2).
 */
@java.lang.Override
public int getDiskTypeValue() {
  return diskType_;
}
/**
 * Returns the {@code disk_type} enum, or {@code UNRECOGNIZED} when the stored
 * wire value has no matching constant in this binary.
 */
@java.lang.Override
public com.google.cloud.notebooks.v2.DiskType getDiskType() {
  com.google.cloud.notebooks.v2.DiskType result =
      com.google.cloud.notebooks.v2.DiskType.forNumber(diskType_);
  return result == null ? com.google.cloud.notebooks.v2.DiskType.UNRECOGNIZED : result;
}

public static final int DISK_ENCRYPTION_FIELD_NUMBER = 5;
private int diskEncryption_ = 0;
/**
 * Returns the raw wire value of the optional, input-only {@code disk_encryption}
 * enum (field 5); per the proto docs this defaults to GMEK.
 */
@java.lang.Override
public int getDiskEncryptionValue() {
  return diskEncryption_;
}
/**
 * Returns the {@code disk_encryption} enum, or {@code UNRECOGNIZED} when the
 * stored wire value has no matching constant in this binary.
 */
@java.lang.Override
public com.google.cloud.notebooks.v2.DiskEncryption getDiskEncryption() {
  com.google.cloud.notebooks.v2.DiskEncryption result =
      com.google.cloud.notebooks.v2.DiskEncryption.forNumber(diskEncryption_);
  return result == null ? com.google.cloud.notebooks.v2.DiskEncryption.UNRECOGNIZED : result;
}

public static final int KMS_KEY_FIELD_NUMBER = 6;
// Holds either a String or a ByteString; lazily converted and cached as String
// on first getKmsKey() call (standard protobuf lazy string representation).
@SuppressWarnings("serial")
private volatile java.lang.Object kmsKey_ = "";
/**
 * Returns the optional, input-only {@code kms_key} (field 6): the KMS key used to
 * encrypt the disks when {@code disk_encryption} is CMEK, in the format
 * {@code projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}}.
 * Decodes and caches the UTF-8 string form if the field still holds bytes.
 */
@java.lang.Override
public java.lang.String getKmsKey() {
  java.lang.Object ref = kmsKey_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    kmsKey_ = s;
    return s;
  }
}
/**
 * Returns the {@code kms_key} field as UTF-8 bytes, converting and caching the
 * ByteString form if the field currently holds a String.
 */
@java.lang.Override
public com.google.protobuf.ByteString getKmsKeyBytes() {
  java.lang.Object ref = kmsKey_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    kmsKey_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state initialization flag: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

/**
 * Always initialized: this message has no required fields, so the first call
 * memoizes {@code true}.
 */
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

/**
 * Serializes the message in field-number order, skipping fields that hold their
 * proto3 default values, then appends unknown fields.
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (diskSizeGb_ != 0L) {
    output.writeInt64(1, diskSizeGb_);
  }
  if (diskType_ != com.google.cloud.notebooks.v2.DiskType.DISK_TYPE_UNSPECIFIED.getNumber()) {
    output.writeEnum(2, diskType_);
  }
  if (diskEncryption_
      != com.google.cloud.notebooks.v2.DiskEncryption.DISK_ENCRYPTION_UNSPECIFIED.getNumber()) {
    output.writeEnum(5, diskEncryption_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kmsKey_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 6, kmsKey_);
  }
  getUnknownFields().writeTo(output);
}

/**
 * Computes (and memoizes in {@code memoizedSize}) the serialized byte size,
 * mirroring the default-skipping logic of {@link #writeTo}.
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (diskSizeGb_ != 0L) {
    size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, diskSizeGb_);
  }
  if (diskType_ != com.google.cloud.notebooks.v2.DiskType.DISK_TYPE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, diskType_);
  }
  if (diskEncryption_
      != com.google.cloud.notebooks.v2.DiskEncryption.DISK_ENCRYPTION_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, diskEncryption_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kmsKey_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, kmsKey_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

/**
 * Field-by-field value equality (including unknown fields); falls back to
 * {@code super.equals} for non-DataDisk arguments.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.notebooks.v2.DataDisk)) {
    return super.equals(obj);
  }
  com.google.cloud.notebooks.v2.DataDisk other = (com.google.cloud.notebooks.v2.DataDisk) obj;
  if (getDiskSizeGb() != other.getDiskSizeGb()) return false;
  if (diskType_ != other.diskType_) return false;
  if (diskEncryption_ != other.diskEncryption_) return false;
  if (!getKmsKey().equals(other.getKmsKey())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

/**
 * Memoized hash over the descriptor, all fields, and unknown fields; consistent
 * with {@link #equals}.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + DISK_SIZE_GB_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getDiskSizeGb());
  hash = (37 * hash) + DISK_TYPE_FIELD_NUMBER;
  hash = (53 * hash) + diskType_;
  hash = (37 * hash) + DISK_ENCRYPTION_FIELD_NUMBER;
  hash = (53 * hash) + diskEncryption_;
  hash = (37 * hash) + KMS_KEY_FIELD_NUMBER;
  hash = (53 * hash) + getKmsKey().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: each overload delegates to the shared
// PARSER (or the GeneratedMessageV3 I/O helpers for stream variants).
public static com.google.cloud.notebooks.v2.DataDisk parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.notebooks.v2.DataDisk parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.notebooks.v2.DataDisk parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.notebooks.v2.DataDisk parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Returns a new builder with all fields unset. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Returns a new builder pre-populated from {@code prototype}. */
public static Builder newBuilder(com.google.cloud.notebooks.v2.DataDisk prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty builder; any other instance seeds
  // the builder with its own field values.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* An instance-attached disk resource.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v2.DataDisk}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v2.DataDisk)
com.google.cloud.notebooks.v2.DataDiskOrBuilder {
/** Returns the protobuf descriptor for {@code google.cloud.notebooks.v2.DataDisk}. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.notebooks.v2.GceSetupProto
      .internal_static_google_cloud_notebooks_v2_DataDisk_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.notebooks.v2.GceSetupProto
      .internal_static_google_cloud_notebooks_v2_DataDisk_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.notebooks.v2.DataDisk.class,
          com.google.cloud.notebooks.v2.DataDisk.Builder.class);
}

// Construct using com.google.cloud.notebooks.v2.DataDisk.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

/** Resets every field to its proto3 default and clears all presence bits. */
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  diskSizeGb_ = 0L;
  diskType_ = 0;
  diskEncryption_ = 0;
  kmsKey_ = "";
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.notebooks.v2.GceSetupProto
      .internal_static_google_cloud_notebooks_v2_DataDisk_descriptor;
}

@java.lang.Override
public com.google.cloud.notebooks.v2.DataDisk getDefaultInstanceForType() {
  return com.google.cloud.notebooks.v2.DataDisk.getDefaultInstance();
}

/**
 * Builds the message, throwing if it is uninitialized (cannot happen here:
 * no required fields).
 */
@java.lang.Override
public com.google.cloud.notebooks.v2.DataDisk build() {
  com.google.cloud.notebooks.v2.DataDisk result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

/** Builds the message without the initialization check. */
@java.lang.Override
public com.google.cloud.notebooks.v2.DataDisk buildPartial() {
  com.google.cloud.notebooks.v2.DataDisk result =
      new com.google.cloud.notebooks.v2.DataDisk(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies only the fields whose presence bits are set in bitField0_.
private void buildPartial0(com.google.cloud.notebooks.v2.DataDisk result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.diskSizeGb_ = diskSizeGb_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.diskType_ = diskType_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.diskEncryption_ = diskEncryption_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.kmsKey_ = kmsKey_;
  }
}
// Reflective-access overrides: all delegate unchanged to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

/** Dispatches to the typed merge when {@code other} is a DataDisk. */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.notebooks.v2.DataDisk) {
    return mergeFrom((com.google.cloud.notebooks.v2.DataDisk) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

/**
 * Merges {@code other} into this builder: each field is copied only when it
 * differs from its proto3 default; unknown fields are merged too.
 */
public Builder mergeFrom(com.google.cloud.notebooks.v2.DataDisk other) {
  if (other == com.google.cloud.notebooks.v2.DataDisk.getDefaultInstance()) return this;
  if (other.getDiskSizeGb() != 0L) {
    setDiskSizeGb(other.getDiskSizeGb());
  }
  if (other.diskType_ != 0) {
    setDiskTypeValue(other.getDiskTypeValue());
  }
  if (other.diskEncryption_ != 0) {
    setDiskEncryptionValue(other.getDiskEncryptionValue());
  }
  if (!other.getKmsKey().isEmpty()) {
    kmsKey_ = other.kmsKey_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

/** Always true: the message has no required fields. */
@java.lang.Override
public final boolean isInitialized() {
  return true;
}

/**
 * Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group tag,
 * setting the matching presence bit for each recognized field and routing
 * unrecognized tags to the unknown-field set.
 */
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 8:
          {
            diskSizeGb_ = input.readInt64();
            bitField0_ |= 0x00000001;
            break;
          } // case 8
        case 16:
          {
            diskType_ = input.readEnum();
            bitField0_ |= 0x00000002;
            break;
          } // case 16
        case 40:
          {
            diskEncryption_ = input.readEnum();
            bitField0_ |= 0x00000004;
            break;
          } // case 40
        case 50:
          {
            kmsKey_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000008;
            break;
          } // case 50
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify listeners even when parsing aborts mid-stream.
    onChanged();
  } // finally
  return this;
}
// Presence bitmask: bit 0 = disk_size_gb, bit 1 = disk_type,
// bit 2 = disk_encryption, bit 3 = kms_key.
private int bitField0_;

private long diskSizeGb_;
/**
 * Returns the optional disk size in GB (field 1); per the proto docs an unset
 * value is treated by the service as 100, max 64000.
 */
@java.lang.Override
public long getDiskSizeGb() {
  return diskSizeGb_;
}
/** Sets {@code disk_size_gb} and marks it present. */
public Builder setDiskSizeGb(long value) {
  diskSizeGb_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/** Clears {@code disk_size_gb} back to 0 and drops its presence bit. */
public Builder clearDiskSizeGb() {
  bitField0_ = (bitField0_ & ~0x00000001);
  diskSizeGb_ = 0L;
  onChanged();
  return this;
}

private int diskType_ = 0;
/** Returns the raw wire value of the input-only {@code disk_type} enum (field 2). */
@java.lang.Override
public int getDiskTypeValue() {
  return diskType_;
}
/** Sets {@code disk_type} from a raw wire value (no validity check) and marks it present. */
public Builder setDiskTypeValue(int value) {
  diskType_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Returns the {@code disk_type} enum, or {@code UNRECOGNIZED} when the stored
 * wire value has no matching constant.
 */
@java.lang.Override
public com.google.cloud.notebooks.v2.DiskType getDiskType() {
  com.google.cloud.notebooks.v2.DiskType result =
      com.google.cloud.notebooks.v2.DiskType.forNumber(diskType_);
  return result == null ? com.google.cloud.notebooks.v2.DiskType.UNRECOGNIZED : result;
}
/** Sets {@code disk_type} from an enum constant (must be non-null) and marks it present. */
public Builder setDiskType(com.google.cloud.notebooks.v2.DiskType value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  diskType_ = value.getNumber();
  onChanged();
  return this;
}
/** Clears {@code disk_type} back to its unspecified value and drops its presence bit. */
public Builder clearDiskType() {
  bitField0_ = (bitField0_ & ~0x00000002);
  diskType_ = 0;
  onChanged();
  return this;
}

private int diskEncryption_ = 0;
/**
 * Returns the raw wire value of the input-only {@code disk_encryption} enum
 * (field 5); per the proto docs this defaults to GMEK.
 */
@java.lang.Override
public int getDiskEncryptionValue() {
  return diskEncryption_;
}
/** Sets {@code disk_encryption} from a raw wire value (no validity check) and marks it present. */
public Builder setDiskEncryptionValue(int value) {
  diskEncryption_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * Returns the {@code disk_encryption} enum, or {@code UNRECOGNIZED} when the
 * stored wire value has no matching constant.
 */
@java.lang.Override
public com.google.cloud.notebooks.v2.DiskEncryption getDiskEncryption() {
  com.google.cloud.notebooks.v2.DiskEncryption result =
      com.google.cloud.notebooks.v2.DiskEncryption.forNumber(diskEncryption_);
  return result == null ? com.google.cloud.notebooks.v2.DiskEncryption.UNRECOGNIZED : result;
}
/** Sets {@code disk_encryption} from an enum constant (must be non-null) and marks it present. */
public Builder setDiskEncryption(com.google.cloud.notebooks.v2.DiskEncryption value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
  diskEncryption_ = value.getNumber();
  onChanged();
  return this;
}
/** Clears {@code disk_encryption} back to its unspecified value and drops its presence bit. */
public Builder clearDiskEncryption() {
  bitField0_ = (bitField0_ & ~0x00000004);
  diskEncryption_ = 0;
  onChanged();
  return this;
}
// kms_key (field #6). Held as either a String or a ByteString; the getters
// below lazily convert and cache the requested representation (standard
// protobuf string-field idiom).
// Optional. Input only. The KMS key used to encrypt the disks, only
// applicable if disk_encryption is CMEK. Format:
// `projects/{project_id}/locations/{location}/keyRings/{key_ring_id}/cryptoKeys/{key_id}`
private java.lang.Object kmsKey_ = "";

/**
 * Optional. Input only. The KMS key used to encrypt the disks, only
 * applicable if disk_encryption is CMEK.
 *
 * @return The kmsKey.
 */
public java.lang.String getKmsKey() {
    java.lang.Object ref = kmsKey_;
    if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        kmsKey_ = s;  // cache the decoded String form
        return s;
    } else {
        return (java.lang.String) ref;
    }
}

/**
 * Optional. Input only. The KMS key used to encrypt the disks, only
 * applicable if disk_encryption is CMEK.
 *
 * @return The bytes for kmsKey.
 */
public com.google.protobuf.ByteString getKmsKeyBytes() {
    java.lang.Object ref = kmsKey_;
    if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        kmsKey_ = b;  // cache the encoded ByteString form
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}

/**
 * Sets kms_key (field #6); null is rejected.
 *
 * @param value The kmsKey to set.
 * @return This builder for chaining.
 */
public Builder setKmsKey(java.lang.String value) {
    if (value == null) {
        throw new NullPointerException();
    }
    kmsKey_ = value;
    bitField0_ |= 0x00000008;  // mark field #6 as explicitly set
    onChanged();
    return this;
}

/**
 * Clears kms_key (field #6) back to the default instance's value.
 *
 * @return This builder for chaining.
 */
public Builder clearKmsKey() {
    kmsKey_ = getDefaultInstance().getKmsKey();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
    return this;
}

/**
 * Sets kms_key (field #6) from raw bytes; validates that the bytes are
 * well-formed UTF-8 before storing.
 *
 * @param value The bytes for kmsKey to set.
 * @return This builder for chaining.
 */
public Builder setKmsKeyBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
        throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    kmsKey_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass;
// these final overrides are emitted by protoc for all message builders.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v2.DataDisk)
}
// @@protoc_insertion_point(class_scope:google.cloud.notebooks.v2.DataDisk)
// Singleton default instance: every field at its proto default value.
private static final com.google.cloud.notebooks.v2.DataDisk DEFAULT_INSTANCE;

static {
    DEFAULT_INSTANCE = new com.google.cloud.notebooks.v2.DataDisk();
}

public static com.google.cloud.notebooks.v2.DataDisk getDefaultInstance() {
    return DEFAULT_INSTANCE;
}
// Shared parser for DataDisk. On any parse failure the partially-built
// message is attached to the thrown InvalidProtocolBufferException so the
// caller can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<DataDisk> PARSER =
    new com.google.protobuf.AbstractParser<DataDisk>() {
        @java.lang.Override
        public DataDisk parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
                builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
                // Missing required data: convert, keeping the partial message.
                throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
                // Wrap raw I/O failures in the protobuf exception type.
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
        }
    };

public static com.google.protobuf.Parser<DataDisk> parser() {
    return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<DataDisk> getParserForType() {
    return PARSER;
}

@java.lang.Override
public com.google.cloud.notebooks.v2.DataDisk getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
}
}
|
openjdk/jdk8 | 35,328 | jdk/src/windows/classes/sun/awt/windows/WToolkit.java | /*
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt.windows;
import java.awt.*;
import java.awt.im.InputMethodHighlight;
import java.awt.im.spi.InputMethodDescriptor;
import java.awt.image.*;
import java.awt.peer.*;
import java.awt.event.KeyEvent;
import java.awt.datatransfer.Clipboard;
import java.awt.TrayIcon;
import java.beans.PropertyChangeListener;
import java.security.AccessController;
import java.security.PrivilegedAction;
import sun.awt.AWTAutoShutdown;
import sun.awt.LightweightFrame;
import sun.awt.SunToolkit;
import sun.awt.Win32GraphicsDevice;
import sun.awt.Win32GraphicsEnvironment;
import sun.java2d.d3d.D3DRenderQueue;
import sun.java2d.opengl.OGLRenderQueue;
import sun.print.PrintJob2D;
import java.awt.dnd.DragSource;
import java.awt.dnd.DragGestureListener;
import java.awt.dnd.DragGestureEvent;
import java.awt.dnd.DragGestureRecognizer;
import java.awt.dnd.MouseDragGestureRecognizer;
import java.awt.dnd.InvalidDnDOperationException;
import java.awt.dnd.peer.DragSourceContextPeer;
import java.util.Hashtable;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import sun.font.FontManager;
import sun.font.FontManagerFactory;
import sun.font.SunFontManager;
import sun.misc.PerformanceLogger;
import sun.util.logging.PlatformLogger;
import sun.security.util.SecurityConstants;
public class WToolkit extends SunToolkit implements Runnable {
private static final PlatformLogger log = PlatformLogger.getLogger("sun.awt.windows.WToolkit");

// Default GraphicsConfiguration; (re)built by resetGC() on startup and on
// display-settings changes.
static GraphicsConfiguration config;

// System clipboard; lazily created by getSystemClipboard().
WClipboard clipboard;

// Cache of font peers, keyed by lower-cased font name + style (see getFontPeer).
private Hashtable<String,FontPeer> cacheFontPeer;

// Windows desktop properties; lazily created by lazilyInitWProps().
private WDesktopProperties wprops;

// Dynamic Layout Resize client code setting (see setDynamicLayout).
protected boolean dynamicLayoutSetting = false;

// Whether events for extra mouse buttons may be generated. Set to true by
// default; overridden via the "sun.awt.enableExtraMouseButtons" system
// property in the constructor.
private static boolean areExtraMouseButtonsEnabled = true;
/**
 * Initialize JNI field and method IDs
 */
private static native void initIDs();

// Guards against loading awt.dll more than once.
// NOTE(review): the check-then-set on 'loaded' is not synchronized; this
// assumes calls happen on single-threaded startup paths (the static
// initializer below) — confirm before calling from elsewhere.
private static boolean loaded = false;

/**
 * Loads the native AWT library ("awt") once, inside a doPrivileged block
 * so loading also works under a security manager.
 */
public static void loadLibraries() {
    if (!loaded) {
        java.security.AccessController.doPrivileged(
            new java.security.PrivilegedAction<Void>() {
                public Void run() {
                    System.loadLibrary("awt");
                    return null;
                }
            });
        loaded = true;
    }
}
private static native String getWindowsVersion();

// Class initializer: load the native library, wire up JNI IDs, log the OS
// version, and disable custom palettes when running as the browser plugin
// ("browser" system property equal to "sun.plugin").
static {
    loadLibraries();
    initIDs();
    // Print out which version of Windows is running
    if (log.isLoggable(PlatformLogger.Level.FINE)) {
        log.fine("Win version: " + getWindowsVersion());
    }
    AccessController.doPrivileged(
        new PrivilegedAction <Void> ()
        {
            public Void run() {
                String browserProp = System.getProperty("browser");
                if (browserProp != null && browserProp.equals("sun.plugin")) {
                    disableCustomPalette();
                }
                return null;
            }
        });
}

private static native void disableCustomPalette();
/*
 * Reset the static GraphicsConfiguration to the default. Called on
 * startup and when display settings have changed. In a headless
 * environment there is no screen device, so the config is cleared.
 */
public static void resetGC() {
    config = GraphicsEnvironment.isHeadless()
        ? null
        : GraphicsEnvironment.getLocalGraphicsEnvironment()
              .getDefaultScreenDevice()
              .getDefaultConfiguration();
}
/*
 * NOTE: The following embedded*() methods are non-public API intended
 * for internal use only. The methods are unsupported and could go
 * away in future releases.
 *
 * New hook functions for using the AWT as an embedded service. These
 * functions replace the global C function AwtInit() which was previously
 * exported by awt.dll.
 *
 * When used as an embedded service, the AWT does NOT have its own
 * message pump. It instead relies on the parent application to provide
 * this functionality. embeddedInit() assumes that the thread on which it
 * is called is the message pumping thread. Violating this assumption
 * will lead to undefined behavior.
 *
 * embeddedInit must be called before the WToolkit() constructor.
 * embeddedDispose should be called before the application terminates the
 * Java VM. It is currently unsafe to reinitialize the toolkit again
 * after it has been disposed. Instead, awt.dll must be reloaded and the
 * class loader which loaded WToolkit must be finalized before it is
 * safe to reuse AWT. Dynamic reusability may be added to the toolkit in
 * the future.
 */

/**
 * Initializes the Toolkit for use in an embedded environment.
 *
 * @return true if the initialization succeeded; false if it failed.
 *         The function will fail if the Toolkit was already initialized.
 * @since 1.3
 */
public static native boolean embeddedInit();

/**
 * Disposes the Toolkit in an embedded environment. This method should
 * not be called on exit unless the Toolkit was constructed with
 * embeddedInit.
 *
 * @return true if the disposal succeeded; false if it failed. The
 *         function will fail if the calling thread is not the same
 *         thread which called embeddedInit(), or if the Toolkit was
 *         already disposed.
 * @since 1.3
 */
public static native boolean embeddedDispose();

/**
 * To be called after processing the event queue by users of the above
 * embeddedInit() function. The reason for this additional call is that
 * there are some operations performed during idle time in the AwtToolkit
 * event loop which should also be performed during idle time in any
 * other native event loop. Failure to do so could result in
 * deadlocks.
 *
 * This method was added at the last minute of the jdk1.4 release
 * to work around a specific customer problem. As with the above
 * embedded*() class, this method is non-public and should not be
 * used by external applications.
 *
 * See bug #4526587 for more information.
 */
public native void embeddedEventLoopIdleProcessing();
// Fully-qualified name of the Windows data-transfer implementation,
// registered with SunToolkit in the constructor.
public static final String DATA_TRANSFERER_CLASS_NAME = "sun.awt.windows.WDataTransferer";

// Disposer record: triggers native toolkit cleanup once the anchor object
// (below) becomes unreachable.
static class ToolkitDisposer implements sun.java2d.DisposerRecord {
    public void dispose() {
        WToolkit.postDispose();
    }
}

// Reachability anchor for the ToolkitDisposer registered in the constructor.
private final Object anchor = new Object();

private static native void postDispose();

// Starts the native toolkit thread; per the constructor's usage, a false
// return means the caller must start a Java "AWT-Windows" thread instead.
private static native boolean startToolkitThread(Runnable thread);
/**
 * Creates the Windows toolkit: starts the native toolkit thread (or a Java
 * fallback thread), blocks until run() signals that initialization is
 * complete, then applies startup settings (data transferer, dynamic
 * layout, extra mouse buttons).
 */
public WToolkit() {
    // Startup toolkit threads
    if (PerformanceLogger.loggingEnabled()) {
        PerformanceLogger.setTime("WToolkit construction");
    }
    sun.java2d.Disposer.addRecord(anchor, new ToolkitDisposer());
    /*
     * Fix for 4701990.
     * AWTAutoShutdown state must be changed before the toolkit thread
     * starts to avoid race condition.
     */
    AWTAutoShutdown.notifyToolkitThreadBusy();
    if (!startToolkitThread(this)) {
        // Native thread creation failed: fall back to a Java daemon thread.
        Thread toolkitThread = new Thread(this, "AWT-Windows");
        toolkitThread.setDaemon(true);
        toolkitThread.start();
    }
    try {
        // Block until run() sets 'inited' and notifies.
        synchronized(this) {
            while(!inited) {
                wait();
            }
        }
    } catch (InterruptedException x) {
        // swallow the exception
    }
    SunToolkit.setDataTransfererClassName(DATA_TRANSFERER_CLASS_NAME);
    // Enabled "live resizing" by default. It remains controlled
    // by the native system though.
    setDynamicLayout(true);
    areExtraMouseButtonsEnabled = Boolean.parseBoolean(System.getProperty("sun.awt.enableExtraMouseButtons", "true"));
    //set system property if not yet assigned
    System.setProperty("sun.awt.enableExtraMouseButtons", ""+areExtraMouseButtonsEnabled);
    setExtraMouseButtonsEnabledNative(areExtraMouseButtonsEnabled);
}
/**
 * Installs a JVM shutdown hook (in the root thread group, with no context
 * class loader) that stops the native message pump via shutdown().
 */
private final void registerShutdownHook() {
    AccessController.doPrivileged(new PrivilegedAction<Void>() {
        public Void run() {
            // Walk up to the root thread group so the hook thread is not
            // tied to a shorter-lived group.
            ThreadGroup rootGroup = Thread.currentThread().getThreadGroup();
            for (ThreadGroup tg = rootGroup.getParent(); tg != null; tg = tg.getParent()) {
                rootGroup = tg;
            }
            Runnable shutdownTask = new Runnable() {
                public void run() {
                    shutdown();
                }
            };
            Thread hook = new Thread(rootGroup, shutdownTask);
            hook.setContextClassLoader(null);
            Runtime.getRuntime().addShutdownHook(hook);
            return null;
        }
    });
}
/**
 * Toolkit thread body: performs native initialization, registers the
 * shutdown hook when this thread owns the message pump, wakes the
 * constructor waiting on 'inited', and then enters the native event loop.
 */
public void run() {
    Thread.currentThread().setPriority(Thread.NORM_PRIORITY+1);
    boolean startPump = init();
    if (startPump) {
        registerShutdownHook();
    }
    // Signal the constructor, which waits on 'inited' under this monitor.
    synchronized(this) {
        inited = true;
        notifyAll();
    }
    if (startPump) {
        eventLoop(); // will Dispose Toolkit when shutdown hook executes
    }
}

/*
 * eventLoop() begins the native message pump which retrieves and processes
 * native events.
 *
 * When shutdown() is called by the ShutdownHook added in run(), a
 * WM_QUIT message is posted to the Toolkit thread indicating that
 * eventLoop() should Dispose the toolkit and exit.
 */
private native boolean init();
// Set true by run() once native init completes; guarded by this object's monitor.
private boolean inited = false;
private native void eventLoop();
private native void shutdown();

/*
 * Instead of blocking the "AWT-Windows" thread uselessly on a semaphore,
 * use these functions. startSecondaryEventLoop() corresponds to wait()
 * and quitSecondaryEventLoop() corresponds to notify.
 *
 * These functions simulate blocking while allowing the AWT to continue
 * processing native events, eliminating a potential deadlock situation
 * with SendMessage.
 *
 * WARNING: startSecondaryEventLoop must only be called from the "AWT-
 * Windows" thread.
 */
public static native void startSecondaryEventLoop();
public static native void quitSecondaryEventLoop();
/*
 * Create peer objects. Each factory constructs the Windows peer for the
 * given AWT target and registers the (target -> peer) association via
 * targetCreatedPeer() so it can be looked up and disposed later.
 */
public ButtonPeer createButton(Button target) {
    ButtonPeer peer = new WButtonPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public TextFieldPeer createTextField(TextField target) {
    TextFieldPeer peer = new WTextFieldPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public LabelPeer createLabel(Label target) {
    LabelPeer peer = new WLabelPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public ListPeer createList(List target) {
    ListPeer peer = new WListPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public CheckboxPeer createCheckbox(Checkbox target) {
    CheckboxPeer peer = new WCheckboxPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public ScrollbarPeer createScrollbar(Scrollbar target) {
    ScrollbarPeer peer = new WScrollbarPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public ScrollPanePeer createScrollPane(ScrollPane target) {
    ScrollPanePeer peer = new WScrollPanePeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public TextAreaPeer createTextArea(TextArea target) {
    TextAreaPeer peer = new WTextAreaPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public ChoicePeer createChoice(Choice target) {
    ChoicePeer peer = new WChoicePeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public FramePeer createFrame(Frame target) {
    FramePeer peer = new WFramePeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public FramePeer createLightweightFrame(LightweightFrame target) {
    FramePeer peer = new WLightweightFramePeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public CanvasPeer createCanvas(Canvas target) {
    CanvasPeer peer = new WCanvasPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

// Disables native background erasing for the given canvas; the canvas must
// already have a (Windows) peer.
@SuppressWarnings("deprecation")
public void disableBackgroundErase(Canvas canvas) {
    WCanvasPeer peer = (WCanvasPeer)canvas.getPeer();
    if (peer == null) {
        throw new IllegalStateException("Canvas must have a valid peer");
    }
    peer.disableBackgroundErase();
}

public PanelPeer createPanel(Panel target) {
    PanelPeer peer = new WPanelPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public WindowPeer createWindow(Window target) {
    WindowPeer peer = new WWindowPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public DialogPeer createDialog(Dialog target) {
    DialogPeer peer = new WDialogPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public FileDialogPeer createFileDialog(FileDialog target) {
    FileDialogPeer peer = new WFileDialogPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public MenuBarPeer createMenuBar(MenuBar target) {
    MenuBarPeer peer = new WMenuBarPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public MenuPeer createMenu(Menu target) {
    MenuPeer peer = new WMenuPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public PopupMenuPeer createPopupMenu(PopupMenu target) {
    PopupMenuPeer peer = new WPopupMenuPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public MenuItemPeer createMenuItem(MenuItem target) {
    MenuItemPeer peer = new WMenuItemPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public CheckboxMenuItemPeer createCheckboxMenuItem(CheckboxMenuItem target) {
    CheckboxMenuItemPeer peer = new WCheckboxMenuItemPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public RobotPeer createRobot(Robot target, GraphicsDevice screen) {
    // (target is unused for now)
    // Robot's don't need to go in the peer map since
    // they're not Component's
    return new WRobotPeer(screen);
}

public WEmbeddedFramePeer createEmbeddedFrame(WEmbeddedFrame target) {
    WEmbeddedFramePeer peer = new WEmbeddedFramePeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

WPrintDialogPeer createWPrintDialog(WPrintDialog target) {
    WPrintDialogPeer peer = new WPrintDialogPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

WPageDialogPeer createWPageDialog(WPageDialog target) {
    WPageDialogPeer peer = new WPageDialogPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

public TrayIconPeer createTrayIcon(TrayIcon target) {
    TrayIconPeer peer = new WTrayIconPeer(target);
    targetCreatedPeer(target, peer);
    return peer;
}

// System-tray peers are not tracked in the peer map.
public SystemTrayPeer createSystemTray(SystemTray target) {
    return new WSystemTrayPeer(target);
}

// The Windows taskbar notification area is always available.
public boolean isTraySupported() {
    return true;
}

public KeyboardFocusManagerPeer getKeyboardFocusManagerPeer()
    throws HeadlessException
{
    return WKeyboardFocusManagerPeer.getInstance();
}
protected native void setDynamicLayoutNative(boolean b);

// Records the client's dynamic-layout preference and pushes it to the
// native system only when the value actually changes.
public void setDynamicLayout(boolean b) {
    if (b == dynamicLayoutSetting) {
        return;
    }
    dynamicLayoutSetting = b;
    setDynamicLayoutNative(b);
}

protected boolean isDynamicLayoutSet() {
    return dynamicLayoutSetting;
}

/*
 * Called from lazilyLoadDynamicLayoutSupportedProperty because
 * Windows doesn't always send WM_SETTINGCHANGE when it should.
 */
protected native boolean isDynamicLayoutSupportedNative();

// Dynamic layout is active only when both requested by the client and
// supported by the running Windows configuration.
public boolean isDynamicLayoutActive() {
    return (isDynamicLayoutSet() && isDynamicLayoutSupported());
}
/**
 * Returns <code>true</code> if this frame state is supported.
 * Windows supports exactly the plain, iconified and fully-maximized states.
 */
public boolean isFrameStateSupported(int state) {
    return state == Frame.NORMAL
        || state == Frame.ICONIFIED
        || state == Frame.MAXIMIZED_BOTH;
}
static native ColorModel makeColorModel();

// Cached screen color model.
// NOTE(review): assigned nowhere in the code visible here — possibly used
// from native code or vestigial; confirm before removing.
static ColorModel screenmodel;

// Returns the color model of the default screen configuration, rebuilding
// the cached config if it has not been initialized yet. Headless
// environments have no screen color model.
static ColorModel getStaticColorModel() {
    if (GraphicsEnvironment.isHeadless()) {
        throw new IllegalArgumentException();
    }
    if (config == null) {
        resetGC();
    }
    return config.getColorModel();
}

public ColorModel getColorModel() {
    return getStaticColorModel();
}
// Delegates to the per-screen native implementation using the screen index
// of the Win32 device backing the given configuration.
public Insets getScreenInsets(GraphicsConfiguration gc)
{
    return getScreenInsets(((Win32GraphicsDevice) gc.getDevice()).getScreen());
}

// Horizontal DPI of the primary display, as reported by the Win32
// graphics environment.
public int getScreenResolution() {
    Win32GraphicsEnvironment ge = (Win32GraphicsEnvironment)
        GraphicsEnvironment.getLocalGraphicsEnvironment();
    return ge.getXResolution();
}

protected native int getScreenWidth();
protected native int getScreenHeight();
protected native Insets getScreenInsets(int screen);
/**
 * Returns font metrics for the given font, using platform (GDI) metrics
 * when the font manager is configured to prefer them, otherwise the
 * standard 2D-pipeline metrics from the superclass.
 */
public FontMetrics getFontMetrics(Font font) {
    // This is an unsupported hack, but left in for a customer.
    // Do not remove.
    FontManager fm = FontManagerFactory.getInstance();
    if (fm instanceof SunFontManager
        && ((SunFontManager) fm).usePlatformFontMetrics()) {
        return WFontMetrics.getFontMetrics(font);
    }
    return super.getFontMetrics(font);
}
/**
 * Returns the font peer for the given name and style, caching peers in
 * {@code cacheFontPeer} keyed by lower-cased name + style.
 *
 * Cleanup vs. the previous version: {@code new WFontPeer(...)} can never
 * return null, so the post-construction null check was dead code, and the
 * cache map was needlessly re-checked immediately after being created.
 *
 * NOTE(review): name.toLowerCase() uses the default locale, as it always
 * has here; switching to Locale.ROOT would change cache keys — confirm
 * before "fixing".
 */
public FontPeer getFontPeer(String name, int style) {
    String key = name.toLowerCase() + style;
    if (cacheFontPeer != null) {
        FontPeer cached = cacheFontPeer.get(key);
        if (cached != null) {
            return cached;
        }
    }
    FontPeer peer = new WFontPeer(name, style);
    if (cacheFontPeer == null) {
        cacheFontPeer = new Hashtable<>(5, 0.9f);
    }
    cacheFontPeer.put(key, peer);
    return peer;
}
private native void nativeSync();

/**
 * Flushes all pending rendering: GDI/DirectDraw buffers first, then the
 * OpenGL and Direct3D render queues (each a no-op when that pipeline is
 * disabled).
 */
public void sync() {
    // flush the GDI/DD buffers
    nativeSync();
    // now flush the OGL pipeline (this is a no-op if OGL is not enabled)
    OGLRenderQueue.sync();
    // now flush the D3D pipeline (this is a no-op if D3D is not enabled)
    D3DRenderQueue.sync();
}
/**
 * Returns a print job with default job/page attributes. Note that the
 * Properties argument is not consulted by this implementation.
 */
public PrintJob getPrintJob(Frame frame, String doctitle,
                            Properties props) {
    return getPrintJob(frame, doctitle, null, null);
}

/**
 * Shows the native print dialog and returns the configured print job,
 * or null when the user cancels the dialog.
 */
public PrintJob getPrintJob(Frame frame, String doctitle,
                            JobAttributes jobAttributes,
                            PageAttributes pageAttributes)
{
    if (frame == null) {
        throw new NullPointerException("frame must not be null");
    }
    PrintJob2D job = new PrintJob2D(frame, doctitle,
                                    jobAttributes, pageAttributes);
    return job.printDialog() ? job : null;
}
public native void beep();

/**
 * Validates that {@code key} is one of the four lockable keys accepted by
 * the locking-key APIs; shared by get/setLockingKeyState, which previously
 * duplicated this check verbatim. The message text matches the original
 * per-method strings exactly.
 */
private static void checkLockingKey(int key, String method) {
    if (! (key == KeyEvent.VK_CAPS_LOCK || key == KeyEvent.VK_NUM_LOCK ||
           key == KeyEvent.VK_SCROLL_LOCK || key == KeyEvent.VK_KANA_LOCK)) {
        throw new IllegalArgumentException("invalid key for Toolkit." + method);
    }
}

/**
 * Returns the state of the given locking key (caps/num/scroll/kana lock).
 *
 * @throws IllegalArgumentException for any other key code
 */
public boolean getLockingKeyState(int key) {
    checkLockingKey(key, "getLockingKeyState");
    return getLockingKeyStateNative(key);
}

public native boolean getLockingKeyStateNative(int key);

/**
 * Sets the state of the given locking key (caps/num/scroll/kana lock).
 *
 * @throws IllegalArgumentException for any other key code
 */
public void setLockingKeyState(int key, boolean on) {
    checkLockingKey(key, "setLockingKeyState");
    setLockingKeyStateNative(key, on);
}

public native void setLockingKeyStateNative(int key, boolean on);
/**
 * Returns the shared system clipboard, creating it on first use. Checks
 * the ACCESS_CLIPBOARD permission when a security manager is installed.
 */
public Clipboard getSystemClipboard() {
    SecurityManager security = System.getSecurityManager();
    if (security != null) {
        security.checkPermission(SecurityConstants.AWT.ACCESS_CLIPBOARD_PERMISSION);
    }
    // Lazy init under this object's monitor; the subsequent unsynchronized
    // read is made by the same thread that just held the lock.
    synchronized (this) {
        if (clipboard == null) {
            clipboard = new WClipboard();
        }
    }
    return clipboard;
}
protected native void loadSystemColors(int[] systemColors);

// Static convenience wrappers over SunToolkit's target <-> peer map.
public static final Object targetToPeer(Object target) {
    return SunToolkit.targetToPeer(target);
}

public static final void targetDisposedPeer(Object target, Object peer) {
    SunToolkit.targetDisposedPeer(target, peer);
}
/**
 * Returns a new input method adapter descriptor for native input methods.
 */
public InputMethodDescriptor getInputMethodAdapterDescriptor() {
    return new WInputMethodDescriptor();
}

/**
 * Returns a style map for the input method highlight.
 */
public Map<java.awt.font.TextAttribute,?> mapInputMethodHighlight(
    InputMethodHighlight highlight)
{
    return WInputMethod.mapInputMethodHighlight(highlight);
}

/**
 * Returns whether enableInputMethods should be set to true for peered
 * TextComponent instances on this platform. Always true on Windows.
 */
public boolean enableInputMethodsForTextComponent() {
    return true;
}

/**
 * Returns the default keyboard locale of the underlying operating system,
 * falling back to the superclass default when the native locale is
 * unavailable.
 */
public Locale getDefaultKeyboardLocale() {
    Locale locale = WInputMethod.getNativeLocale();
    if (locale == null) {
        return super.getDefaultKeyboardLocale();
    } else {
        return locale;
    }
}
/**
 * Returns a new custom cursor.
 */
public Cursor createCustomCursor(Image cursor, Point hotSpot, String name)
    throws IndexOutOfBoundsException {
    return new WCustomCursor(cursor, hotSpot, name);
}

/**
 * Returns the supported cursor size (Win32 only has one); the preferred
 * dimensions are ignored.
 */
public Dimension getBestCursorSize(int preferredWidth, int preferredHeight) {
    return new Dimension(WCustomCursor.getCursorWidth(),
                         WCustomCursor.getCursorHeight());
}

public native int getMaximumCursorColors();
// Called from native code when the system palette changes; forwards to the
// Win32 graphics environment.
static void paletteChanged() {
    ((Win32GraphicsEnvironment)GraphicsEnvironment
        .getLocalGraphicsEnvironment())
        .paletteChanged();
}

/*
 * Called from Toolkit native code when a WM_DISPLAYCHANGE occurs.
 * Have Win32GraphicsEnvironment execute the display change code on the
 * Event thread.
 */
static public void displayChanged() {
    EventQueue.invokeLater(new Runnable() {
        public void run() {
            ((Win32GraphicsEnvironment)GraphicsEnvironment
                .getLocalGraphicsEnvironment())
                .displayChanged();
        }
    });
}
/**
 * Creates the peer for a DragSourceContext; delegates to the Windows
 * drag-source implementation.
 */
public DragSourceContextPeer createDragSourceContextPeer(DragGestureEvent dge) throws InvalidDnDOperationException {
    return WDragSourceContextPeer.createDragSourceContextPeer(dge);
}
/**
 * Creates a drag gesture recognizer for the given component. Only
 * {@code MouseDragGestureRecognizer} is supported on this platform; any
 * other requested type yields null.
 *
 * Cleanup vs. the previous version: the unchecked cast is now explicitly
 * suppressed with a justification, and the if/else is braced.
 */
@SuppressWarnings("unchecked")
public <T extends DragGestureRecognizer> T
    createDragGestureRecognizer(Class<T> abstractRecognizerClass,
                                DragSource ds, Component c, int srcActions,
                                DragGestureListener dgl)
{
    if (MouseDragGestureRecognizer.class.equals(abstractRecognizerClass)) {
        // Safe: the caller asked for exactly MouseDragGestureRecognizer,
        // and WMouseDragGestureRecognizer is one.
        return (T)new WMouseDragGestureRecognizer(ds, c, srcActions, dgl);
    }
    return null;
}
/**
 * Desktop-property name prefixes recognized by lazilyLoadDesktopProperty().
 */
private static final String prefix = "DnD.Cursor.";
private static final String postfix = ".32x32";
private static final String awtPrefix = "awt.";
private static final String dndPrefix = "DnD.";

/**
 * Resolves a desktop property on first access: DnD cursors are loaded as
 * system custom cursors, the dynamic-layout flag is computed natively, and
 * Windows/AWT/DnD properties are served from WDesktopProperties.
 */
protected Object lazilyLoadDesktopProperty(String name) {
    // "DnD.Cursor.<name>" -> system custom cursor "<name>.32x32".
    if (name.startsWith(prefix)) {
        String cursorName = name.substring(prefix.length(), name.length()) + postfix;
        try {
            return Cursor.getSystemCustomCursor(cursorName);
        } catch (AWTException awte) {
            throw new RuntimeException("cannot load system cursor: " + cursorName, awte);
        }
    }
    if (name.equals("awt.dynamicLayoutSupported")) {
        return Boolean.valueOf(isDynamicLayoutSupported());
    }
    if (WDesktopProperties.isWindowsProperty(name) ||
        name.startsWith(awtPrefix) || name.startsWith(dndPrefix))
    {
        synchronized(this) {
            lazilyInitWProps();
            return desktopProperties.get(name);
        }
    }
    return super.lazilyLoadDesktopProperty(name);
}
// Creates the WDesktopProperties helper on first use and publishes its
// current values into the desktop-property map.
private synchronized void lazilyInitWProps() {
    if (wprops == null) {
        wprops = new WDesktopProperties(this);
        updateProperties();
    }
}

/*
 * Called from lazilyLoadDesktopProperty because Windows doesn't
 * always send WM_SETTINGCHANGE when it should.
 */
private synchronized boolean isDynamicLayoutSupported() {
    boolean nativeDynamic = isDynamicLayoutSupportedNative();
    lazilyInitWProps();
    Boolean prop = (Boolean) desktopProperties.get("awt.dynamicLayoutSupported");
    if (log.isLoggable(PlatformLogger.Level.FINER)) {
        log.finer("In WTK.isDynamicLayoutSupported()" +
                  " nativeDynamic == " + nativeDynamic +
                  " wprops.dynamic == " + prop);
    }
    if ((prop == null) || (nativeDynamic != prop.booleanValue())) {
        // We missed the WM_SETTINGCHANGE, so we pretend
        // we just got one - fire the propertyChange, etc.
        windowsSettingChange();
        return nativeDynamic;
    }
    return prop.booleanValue();
}

/*
 * Called from native toolkit code when WM_SETTINGCHANGE message received
 * Also called from lazilyLoadDynamicLayoutSupportedProperty because
 * Windows doesn't always send WM_SETTINGCHANGE when it should.
 */
private void windowsSettingChange() {
    // Refresh on the EDT so property-change listeners run there.
    EventQueue.invokeLater(new Runnable() {
        public void run() {
            updateProperties();
        }
    });
}
/**
 * Publishes every current Windows desktop property into the toolkit's
 * desktop-property map, firing change notifications via
 * setDesktopProperty(). No-op until wprops has been initialized.
 *
 * Cleanup vs. the previous version: iterates the map's entry set directly
 * instead of keySet() followed by a get() lookup per key.
 */
private synchronized void updateProperties() {
    if (wprops == null) {
        // wprops has not been initialized, so we have nothing to update
        return;
    }
    for (Map.Entry<String, Object> entry : wprops.getProperties().entrySet()) {
        String propName = entry.getKey();
        Object val = entry.getValue();
        if (log.isLoggable(PlatformLogger.Level.FINER)) {
            log.finer("changed " + propName + " to " + val);
        }
        setDesktopProperty(propName, val);
    }
}
/**
 * Registers a property-change listener. When the name indicates interest
 * in a Windows/AWT/DnD desktop property, the Windows properties are
 * eagerly initialized so the first change event is not missed. A null
 * name is ignored, per the Toolkit contract.
 */
public synchronized void addPropertyChangeListener(String name, PropertyChangeListener pcl) {
    if (name == null) {
        // See JavaDoc for the Toolkit.addPropertyChangeListener() method
        return;
    }
    if ( WDesktopProperties.isWindowsProperty(name)
        || name.startsWith(awtPrefix)
        || name.startsWith(dndPrefix))
    {
        // someone is interested in Windows-specific desktop properties
        // we should initialize wprops
        lazilyInitWProps();
    }
    super.addPropertyChangeListener(name, pcl);
}
/*
 * initialize only static props here and do not try to initialize props which depends on wprops,
 * this should be done in lazilyLoadDesktopProperty() only.
 */
protected synchronized void initializeDesktopProperties() {
    desktopProperties.put("DnD.Autoscroll.initialDelay",
                          Integer.valueOf(50));
    desktopProperties.put("DnD.Autoscroll.interval",
                          Integer.valueOf(50));
    desktopProperties.put("DnD.isDragImageSupported",
                          Boolean.TRUE);
    desktopProperties.put("Shell.shellFolderManager",
                          "sun.awt.shell.Win32ShellFolderManager2");
}
/*
 * This returns the value for the desktop property "awt.font.desktophints"
 * This requires that the Windows properties have already been gathered;
 * until then there are no hints to report.
 */
protected synchronized RenderingHints getDesktopAAHints() {
    return (wprops == null) ? null : wprops.getDesktopAAHints();
}
public boolean isModalityTypeSupported(Dialog.ModalityType modalityType) {
return (modalityType == null) ||
(modalityType == Dialog.ModalityType.MODELESS) ||
(modalityType == Dialog.ModalityType.DOCUMENT_MODAL) ||
(modalityType == Dialog.ModalityType.APPLICATION_MODAL) ||
(modalityType == Dialog.ModalityType.TOOLKIT_MODAL);
}
public boolean isModalExclusionTypeSupported(Dialog.ModalExclusionType exclusionType) {
return (exclusionType == null) ||
(exclusionType == Dialog.ModalExclusionType.NO_EXCLUDE) ||
(exclusionType == Dialog.ModalExclusionType.APPLICATION_EXCLUDE) ||
(exclusionType == Dialog.ModalExclusionType.TOOLKIT_EXCLUDE);
}
public static WToolkit getWToolkit() {
WToolkit toolkit = (WToolkit)Toolkit.getDefaultToolkit();
return toolkit;
}
/**
* There are two reasons why we don't use buffer per window when
* Vista's DWM (aka Aero) is enabled:
* - since with DWM all windows are already double-buffered, the application
* doesn't get expose events so we don't get to use our true back-buffer,
* wasting memory and performance (this is valid for both d3d and gdi
* pipelines)
* - in some cases with buffer per window enabled it is possible for the
* paint manager to redirect rendering to the screen for some operations
* (like copyArea), and since bpw uses its own BufferStrategy the
* d3d onscreen rendering support is disabled and rendering goes through
* GDI. This doesn't work well with Vista's DWM since one
* can not perform GDI and D3D operations on the same surface
* (see 6630702 for more info)
*
* Note: even though DWM composition state can change during the lifetime
* of the application it is a rare event, and it is more often that it
* is temporarily disabled (because of some app) than it is getting
* permanently enabled so we can live with this approach without the
* complexity of dwm state listeners and such. This can be revisited if
* proved otherwise.
*/
@Override
public boolean useBufferPerWindow() {
return !Win32GraphicsEnvironment.isDWMCompositionEnabled();
}
@SuppressWarnings("deprecation")
public void grab(Window w) {
if (w.getPeer() != null) {
((WWindowPeer)w.getPeer()).grab();
}
}
@SuppressWarnings("deprecation")
public void ungrab(Window w) {
if (w.getPeer() != null) {
((WWindowPeer)w.getPeer()).ungrab();
}
}
    // Blocks until the native message queue has been processed, or the
    // timeout (ms) elapses; returns whether any events were dispatched.
    // NOTE(review): exact return semantics defined on the native side — confirm.
    public native boolean syncNativeQueue(final long timeout);
    // Desktop API (java.awt.Desktop) is always available on Windows.
    public boolean isDesktopSupported() {
        return true;
    }
    public DesktopPeer createDesktopPeer(Desktop target) {
        return new WDesktopPeer();
    }
    // Pushes the extra-mouse-buttons setting down to the native toolkit.
    public static native void setExtraMouseButtonsEnabledNative(boolean enable);
    // Returns the cached flag captured at toolkit startup.
    public boolean areExtraMouseButtonsEnabled() throws HeadlessException {
        return areExtraMouseButtonsEnabled;
    }
private native synchronized int getNumberOfButtonsImpl();
@Override
public int getNumberOfButtons(){
if (numberOfButtons == 0) {
numberOfButtons = getNumberOfButtonsImpl();
}
return (numberOfButtons > MAX_BUTTONS_SUPPORTED)? MAX_BUTTONS_SUPPORTED : numberOfButtons;
}
    // Per-pixel and whole-window opacity is available on all supported
    // Windows versions.
    @Override
    public boolean isWindowOpacitySupported() {
        // supported in Win2K and later
        return true;
    }
    // Non-rectangular window shapes (Window.setShape) are always available.
    @Override
    public boolean isWindowShapingSupported() {
        return true;
    }
    @Override
    public boolean isWindowTranslucencySupported() {
        // supported in Win2K and later
        return true;
    }
    // Every GraphicsConfiguration is treated as translucency-capable.
    @Override
    public boolean isTranslucencyCapable(GraphicsConfiguration gc) {
        //XXX: worth checking if 8-bit? Anyway, it doesn't hurt.
        return true;
    }
    // On MS Windows one must use the peer.updateWindow() to implement
    // non-opaque windows.
    @Override
    public boolean needUpdateWindow() {
        return true;
    }
}
|
googleapis/google-cloud-java | 35,197 | java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/ListOccurrencesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto
// Protobuf Java Version: 3.25.8
package io.grafeas.v1beta1;
/**
*
*
* <pre>
* Response for listing occurrences.
* </pre>
*
* Protobuf type {@code grafeas.v1beta1.ListOccurrencesResponse}
*/
public final class ListOccurrencesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:grafeas.v1beta1.ListOccurrencesResponse)
ListOccurrencesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListOccurrencesResponse.newBuilder() to construct.
private ListOccurrencesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListOccurrencesResponse() {
occurrences_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListOccurrencesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListOccurrencesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListOccurrencesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1beta1.ListOccurrencesResponse.class,
io.grafeas.v1beta1.ListOccurrencesResponse.Builder.class);
}
public static final int OCCURRENCES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<io.grafeas.v1beta1.Occurrence> occurrences_;
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
@java.lang.Override
public java.util.List<io.grafeas.v1beta1.Occurrence> getOccurrencesList() {
return occurrences_;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends io.grafeas.v1beta1.OccurrenceOrBuilder>
getOccurrencesOrBuilderList() {
return occurrences_;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
@java.lang.Override
public int getOccurrencesCount() {
return occurrences_.size();
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
@java.lang.Override
public io.grafeas.v1beta1.Occurrence getOccurrences(int index) {
return occurrences_.get(index);
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
@java.lang.Override
public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) {
return occurrences_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < occurrences_.size(); i++) {
output.writeMessage(1, occurrences_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < occurrences_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, occurrences_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof io.grafeas.v1beta1.ListOccurrencesResponse)) {
return super.equals(obj);
}
io.grafeas.v1beta1.ListOccurrencesResponse other =
(io.grafeas.v1beta1.ListOccurrencesResponse) obj;
if (!getOccurrencesList().equals(other.getOccurrencesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getOccurrencesCount() > 0) {
hash = (37 * hash) + OCCURRENCES_FIELD_NUMBER;
hash = (53 * hash) + getOccurrencesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1beta1.ListOccurrencesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(io.grafeas.v1beta1.ListOccurrencesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for listing occurrences.
* </pre>
*
* Protobuf type {@code grafeas.v1beta1.ListOccurrencesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:grafeas.v1beta1.ListOccurrencesResponse)
io.grafeas.v1beta1.ListOccurrencesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListOccurrencesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListOccurrencesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1beta1.ListOccurrencesResponse.class,
io.grafeas.v1beta1.ListOccurrencesResponse.Builder.class);
}
// Construct using io.grafeas.v1beta1.ListOccurrencesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (occurrencesBuilder_ == null) {
occurrences_ = java.util.Collections.emptyList();
} else {
occurrences_ = null;
occurrencesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListOccurrencesResponse_descriptor;
}
@java.lang.Override
public io.grafeas.v1beta1.ListOccurrencesResponse getDefaultInstanceForType() {
return io.grafeas.v1beta1.ListOccurrencesResponse.getDefaultInstance();
}
@java.lang.Override
public io.grafeas.v1beta1.ListOccurrencesResponse build() {
io.grafeas.v1beta1.ListOccurrencesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public io.grafeas.v1beta1.ListOccurrencesResponse buildPartial() {
io.grafeas.v1beta1.ListOccurrencesResponse result =
new io.grafeas.v1beta1.ListOccurrencesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(io.grafeas.v1beta1.ListOccurrencesResponse result) {
if (occurrencesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
occurrences_ = java.util.Collections.unmodifiableList(occurrences_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.occurrences_ = occurrences_;
} else {
result.occurrences_ = occurrencesBuilder_.build();
}
}
private void buildPartial0(io.grafeas.v1beta1.ListOccurrencesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof io.grafeas.v1beta1.ListOccurrencesResponse) {
return mergeFrom((io.grafeas.v1beta1.ListOccurrencesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(io.grafeas.v1beta1.ListOccurrencesResponse other) {
if (other == io.grafeas.v1beta1.ListOccurrencesResponse.getDefaultInstance()) return this;
if (occurrencesBuilder_ == null) {
if (!other.occurrences_.isEmpty()) {
if (occurrences_.isEmpty()) {
occurrences_ = other.occurrences_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureOccurrencesIsMutable();
occurrences_.addAll(other.occurrences_);
}
onChanged();
}
} else {
if (!other.occurrences_.isEmpty()) {
if (occurrencesBuilder_.isEmpty()) {
occurrencesBuilder_.dispose();
occurrencesBuilder_ = null;
occurrences_ = other.occurrences_;
bitField0_ = (bitField0_ & ~0x00000001);
occurrencesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getOccurrencesFieldBuilder()
: null;
} else {
occurrencesBuilder_.addAllMessages(other.occurrences_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
io.grafeas.v1beta1.Occurrence m =
input.readMessage(io.grafeas.v1beta1.Occurrence.parser(), extensionRegistry);
if (occurrencesBuilder_ == null) {
ensureOccurrencesIsMutable();
occurrences_.add(m);
} else {
occurrencesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<io.grafeas.v1beta1.Occurrence> occurrences_ =
java.util.Collections.emptyList();
private void ensureOccurrencesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
occurrences_ = new java.util.ArrayList<io.grafeas.v1beta1.Occurrence>(occurrences_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
io.grafeas.v1beta1.Occurrence,
io.grafeas.v1beta1.Occurrence.Builder,
io.grafeas.v1beta1.OccurrenceOrBuilder>
occurrencesBuilder_;
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public java.util.List<io.grafeas.v1beta1.Occurrence> getOccurrencesList() {
if (occurrencesBuilder_ == null) {
return java.util.Collections.unmodifiableList(occurrences_);
} else {
return occurrencesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public int getOccurrencesCount() {
if (occurrencesBuilder_ == null) {
return occurrences_.size();
} else {
return occurrencesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public io.grafeas.v1beta1.Occurrence getOccurrences(int index) {
if (occurrencesBuilder_ == null) {
return occurrences_.get(index);
} else {
return occurrencesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder setOccurrences(int index, io.grafeas.v1beta1.Occurrence value) {
if (occurrencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOccurrencesIsMutable();
occurrences_.set(index, value);
onChanged();
} else {
occurrencesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder setOccurrences(
int index, io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
if (occurrencesBuilder_ == null) {
ensureOccurrencesIsMutable();
occurrences_.set(index, builderForValue.build());
onChanged();
} else {
occurrencesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder addOccurrences(io.grafeas.v1beta1.Occurrence value) {
if (occurrencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOccurrencesIsMutable();
occurrences_.add(value);
onChanged();
} else {
occurrencesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder addOccurrences(int index, io.grafeas.v1beta1.Occurrence value) {
if (occurrencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOccurrencesIsMutable();
occurrences_.add(index, value);
onChanged();
} else {
occurrencesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder addOccurrences(io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
if (occurrencesBuilder_ == null) {
ensureOccurrencesIsMutable();
occurrences_.add(builderForValue.build());
onChanged();
} else {
occurrencesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder addOccurrences(
int index, io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
if (occurrencesBuilder_ == null) {
ensureOccurrencesIsMutable();
occurrences_.add(index, builderForValue.build());
onChanged();
} else {
occurrencesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder addAllOccurrences(
java.lang.Iterable<? extends io.grafeas.v1beta1.Occurrence> values) {
if (occurrencesBuilder_ == null) {
ensureOccurrencesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, occurrences_);
onChanged();
} else {
occurrencesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder clearOccurrences() {
if (occurrencesBuilder_ == null) {
occurrences_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
occurrencesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public Builder removeOccurrences(int index) {
if (occurrencesBuilder_ == null) {
ensureOccurrencesIsMutable();
occurrences_.remove(index);
onChanged();
} else {
occurrencesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public io.grafeas.v1beta1.Occurrence.Builder getOccurrencesBuilder(int index) {
return getOccurrencesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) {
if (occurrencesBuilder_ == null) {
return occurrences_.get(index);
} else {
return occurrencesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public java.util.List<? extends io.grafeas.v1beta1.OccurrenceOrBuilder>
getOccurrencesOrBuilderList() {
if (occurrencesBuilder_ != null) {
return occurrencesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(occurrences_);
}
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public io.grafeas.v1beta1.Occurrence.Builder addOccurrencesBuilder() {
return getOccurrencesFieldBuilder()
.addBuilder(io.grafeas.v1beta1.Occurrence.getDefaultInstance());
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public io.grafeas.v1beta1.Occurrence.Builder addOccurrencesBuilder(int index) {
return getOccurrencesFieldBuilder()
.addBuilder(index, io.grafeas.v1beta1.Occurrence.getDefaultInstance());
}
/**
*
*
* <pre>
* The occurrences requested.
* </pre>
*
* <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code>
*/
public java.util.List<io.grafeas.v1beta1.Occurrence.Builder> getOccurrencesBuilderList() {
return getOccurrencesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
io.grafeas.v1beta1.Occurrence,
io.grafeas.v1beta1.Occurrence.Builder,
io.grafeas.v1beta1.OccurrenceOrBuilder>
getOccurrencesFieldBuilder() {
if (occurrencesBuilder_ == null) {
occurrencesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
io.grafeas.v1beta1.Occurrence,
io.grafeas.v1beta1.Occurrence.Builder,
io.grafeas.v1beta1.OccurrenceOrBuilder>(
occurrences_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
occurrences_ = null;
}
return occurrencesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The next pagination token in the list response. It should be used as
* `page_token` for the following request. An empty value means no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:grafeas.v1beta1.ListOccurrencesResponse)
}
// @@protoc_insertion_point(class_scope:grafeas.v1beta1.ListOccurrencesResponse)
// Shared immutable default instance; one empty message serves as the
// default value everywhere.
private static final io.grafeas.v1beta1.ListOccurrencesResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new io.grafeas.v1beta1.ListOccurrencesResponse();
}

public static io.grafeas.v1beta1.ListOccurrencesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser backing all parseFrom variants. Parsing delegates to
// Builder.mergeFrom; on any failure the partially-built message is attached
// to the thrown InvalidProtocolBufferException via setUnfinishedMessage so
// callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<ListOccurrencesResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListOccurrencesResponse>() {
      @java.lang.Override
      public ListOccurrencesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListOccurrencesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListOccurrencesResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public io.grafeas.v1beta1.ListOccurrencesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,284 | java-vmwareengine/proto-google-cloud-vmwareengine-v1/src/main/java/com/google/cloud/vmwareengine/v1/FetchNetworkPolicyExternalAddressesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vmwareengine/v1/vmwareengine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vmwareengine.v1;
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest}
*/
// NOTE: protoc-generated message class (see DO NOT EDIT header above);
// hand edits are lost on regeneration.
public final class FetchNetworkPolicyExternalAddressesRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest)
    FetchNetworkPolicyExternalAddressesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use FetchNetworkPolicyExternalAddressesRequest.newBuilder() to construct.
  private FetchNetworkPolicyExternalAddressesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: string fields start at "" (proto3 default).
  private FetchNetworkPolicyExternalAddressesRequest() {
    networkPolicy_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new FetchNetworkPolicyExternalAddressesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_FetchNetworkPolicyExternalAddressesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_FetchNetworkPolicyExternalAddressesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest.class,
            com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest.Builder
                .class);
  }

  public static final int NETWORK_POLICY_FIELD_NUMBER = 1;

  // Lazily-decoded string field: holds either a String or a ByteString from
  // the wire; the decoded String is cached on first access (volatile so the
  // cached value publishes safely across threads).
  @SuppressWarnings("serial")
  private volatile java.lang.Object networkPolicy_ = "";
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The networkPolicy.
*/
@java.lang.Override
public java.lang.String getNetworkPolicy() {
  java.lang.Object ref = networkPolicy_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the ByteString once and cache the
    // String back into the field (value is logically immutable).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    networkPolicy_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for networkPolicy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNetworkPolicyBytes() {
  java.lang.Object ref = networkPolicy_;
  if (ref instanceof java.lang.String) {
    // Mirror of getNetworkPolicy(): encode once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    networkPolicy_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int PAGE_SIZE_FIELD_NUMBER = 2;

// page_size: plain int32, wire default 0 (service applies 500 per javadoc).
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of external IP addresses to return in one page.
* The service may return fewer than this value.
* The maximum value is coerced to 1000.
* The default value of this field is 500.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}

public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

// Same lazy String/ByteString caching scheme as networkPolicy_.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode-and-cache on first access, as for networkPolicy_.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Memoized initialization state: -1 = not yet computed, 1 = initialized,
// 0 = not initialized. This message has no fields that can be
// uninitialized, so the check always settles on 1.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes fields in field-number order, skipping fields still at their
// default value, then appends any preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(networkPolicy_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, networkPolicy_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  getUnknownFields().writeTo(output);
}

// Computes the serialized byte size, mirroring writeTo's default-skipping
// logic; memoized in memoizedSize (-1 = not yet computed).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(networkPolicy_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, networkPolicy_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all three fields plus unknown fields; delegates to
// super.equals for objects of any other type.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj
      instanceof com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest other =
      (com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest) obj;
  if (!getNetworkPolicy().equals(other.getNetworkPolicy())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash over descriptor plus each field, consistent with equals() above;
// memoized in memoizedHashCode (0 = not yet computed).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NETWORK_POLICY_FIELD_NUMBER;
  hash = (53 * hash) + getNetworkPolicy().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistry. All delegate to the shared PARSER.
public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a leading varint length prefix before the message.
public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

// Skips the mergeFrom when converting the shared (empty) default instance.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for
* [VmwareEngine.FetchNetworkPolicyExternalAddresses][google.cloud.vmwareengine.v1.VmwareEngine.FetchNetworkPolicyExternalAddresses]
* </pre>
*
* Protobuf type {@code google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest}
*/
// Mutable builder for FetchNetworkPolicyExternalAddressesRequest.
// Tracks explicitly-set fields in bitField0_ (0x1 network_policy,
// 0x2 page_size, 0x4 page_token) so buildPartial only copies set fields.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest)
    com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_FetchNetworkPolicyExternalAddressesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_FetchNetworkPolicyExternalAddressesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest.class,
            com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest.Builder
                .class);
  }

  // Construct using
  // com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field (and all has-bits) to its proto3 default.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    networkPolicy_ = "";
    pageSize_ = 0;
    pageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.vmwareengine.v1.VmwareengineProto
        .internal_static_google_cloud_vmwareengine_v1_FetchNetworkPolicyExternalAddressesRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
      getDefaultInstanceForType() {
    return com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
        .getDefaultInstance();
  }

  // Like buildPartial(), but throws if the result is not fully initialized.
  @java.lang.Override
  public com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest build() {
    com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest result =
        buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
      buildPartial() {
    com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest result =
        new com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest(this);
    // Copy fields only when at least one has-bit is set.
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies each field whose has-bit is set into the immutable result.
  private void buildPartial0(
      com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.networkPolicy_ = networkPolicy_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.pageSize_ = pageSize_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.pageToken_ = pageToken_;
    }
  }
  // The next six overrides simply delegate to the superclass; they exist so
  // the generated builder exposes a covariant Builder return type.
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic-dispatch merge: routes same-type messages to the typed overload,
  // everything else through the reflective superclass merge.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other
        instanceof com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest) {
      return mergeFrom(
          (com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-wise merge: non-default fields of `other` overwrite this builder's
  // values and set the corresponding has-bits; default fields are left alone.
  public Builder mergeFrom(
      com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest other) {
    if (other
        == com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
            .getDefaultInstance()) return this;
    if (!other.getNetworkPolicy().isEmpty()) {
      networkPolicy_ = other.networkPolicy_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.getPageSize() != 0) {
      setPageSize(other.getPageSize());
    }
    if (!other.getPageToken().isEmpty()) {
      pageToken_ = other.pageToken_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop. Tags combine (field_number << 3) | wire_type:
  // 10 = field 1 length-delimited (network_policy), 16 = field 2 varint
  // (page_size), 26 = field 3 length-delimited (page_token). Unrecognized
  // tags are preserved as unknown fields.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              networkPolicy_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              pageSize_ = input.readInt32();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 26:
            {
              pageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      // Notify parent builders even when the parse aborts part-way.
      onChanged();
    } // finally
    return this;
  }

  // Has-bits for the three fields (see buildPartial0 for the mapping).
  private int bitField0_;

  // Builder-side copy of network_policy; same String/ByteString duality as
  // the message field.
  private java.lang.Object networkPolicy_ = "";
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The networkPolicy.
*/
  public java.lang.String getNetworkPolicy() {
    java.lang.Object ref = networkPolicy_;
    if (!(ref instanceof java.lang.String)) {
      // Decode the raw ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      networkPolicy_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for networkPolicy.
*/
  public com.google.protobuf.ByteString getNetworkPolicyBytes() {
    java.lang.Object ref = networkPolicy_;
    if (ref instanceof String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      networkPolicy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The networkPolicy to set.
* @return This builder for chaining.
*/
  // Sets `network_policy` (field 1) and marks it set (bit 0x00000001).
  public Builder setNetworkPolicy(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    networkPolicy_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
  // Resets `network_policy` to "" and clears its has-bit.
  public Builder clearNetworkPolicy() {
    networkPolicy_ = getDefaultInstance().getNetworkPolicy();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
/**
*
*
* <pre>
* Required. The resource name of the network policy to query for assigned
* external IP addresses. Resource names are schemeless URIs that follow the
* conventions in https://cloud.google.com/apis/design/resource_names. For
* example:
* `projects/my-project/locations/us-central1/networkPolicies/my-policy`
* </pre>
*
* <code>
* string network_policy = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for networkPolicy to set.
* @return This builder for chaining.
*/
  // Byte-level setter for `network_policy`; validates UTF-8 before storing.
  public Builder setNetworkPolicyBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    networkPolicy_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Builder-side copy of page_size (has-bit 0x00000002).
  private int pageSize_;
/**
*
*
* <pre>
* The maximum number of external IP addresses to return in one page.
* The service may return fewer than this value.
* The maximum value is coerced to 1000.
* The default value of this field is 500.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
/**
*
*
* <pre>
* The maximum number of external IP addresses to return in one page.
* The service may return fewer than this value.
* The maximum value is coerced to 1000.
* The default value of this field is 500.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
  // Sets `page_size` (field 2) and marks it set (bit 0x00000002).
  public Builder setPageSize(int value) {
    pageSize_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
/**
*
*
* <pre>
* The maximum number of external IP addresses to return in one page.
* The service may return fewer than this value.
* The maximum value is coerced to 1000.
* The default value of this field is 500.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
  // Resets `page_size` to 0 and clears its has-bit.
  public Builder clearPageSize() {
    bitField0_ = (bitField0_ & ~0x00000002);
    pageSize_ = 0;
    onChanged();
    return this;
  }

  // Builder-side copy of page_token; same String/ByteString duality.
  private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (!(ref instanceof java.lang.String)) {
      // Decode the raw ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
  // Sets `page_token` (field 3) and marks it set (bit 0x00000004).
  public Builder setPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
  // Resets `page_token` to "" and clears its has-bit.
  public Builder clearPageToken() {
    pageToken_ = getDefaultInstance().getPageToken();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }
/**
*
*
* <pre>
* A page token, received from a previous
* `FetchNetworkPolicyExternalAddresses` call. Provide this to retrieve the
* subsequent page.
*
* When paginating, all parameters provided to
* `FetchNetworkPolicyExternalAddresses`, except for `page_size` and
* `page_token`, must match the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
  // Byte-level setter for `page_token`; validates UTF-8 before storing.
  public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    pageToken_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  // Final overrides pinning unknown-field handling to the superclass so
  // unrecognized wire data is preserved.
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest)
}

// @@protoc_insertion_point(class_scope:google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest)

// Shared immutable default instance; serves as the default value everywhere.
private static final com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest();
}

public static com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser backing the parseFrom variants. Parsing delegates to
// Builder.mergeFrom; on failure the partially-built message is attached to
// the thrown InvalidProtocolBufferException via setUnfinishedMessage.
private static final com.google.protobuf.Parser<FetchNetworkPolicyExternalAddressesRequest>
    PARSER =
        new com.google.protobuf.AbstractParser<FetchNetworkPolicyExternalAddressesRequest>() {
          @java.lang.Override
          public FetchNetworkPolicyExternalAddressesRequest parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap plain I/O failures so callers see a single exception type.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };

public static com.google.protobuf.Parser<FetchNetworkPolicyExternalAddressesRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<FetchNetworkPolicyExternalAddressesRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.vmwareengine.v1.FetchNetworkPolicyExternalAddressesRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
|
apache/distributedlog | 35,376 | distributedlog-core/src/test/java/org/apache/distributedlog/TestBKLogSegmentWriter.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog;
import static com.google.common.base.Charsets.UTF_8;
import static org.junit.Assert.*;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import org.apache.bookkeeper.client.BKException;
import org.apache.bookkeeper.client.BookKeeper;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.bookkeeper.feature.SettableFeatureProvider;
import org.apache.bookkeeper.stats.AlertStatsLogger;
import org.apache.bookkeeper.stats.NullStatsLogger;
import org.apache.distributedlog.common.concurrent.FutureUtils;
import org.apache.distributedlog.common.util.PermitLimiter;
import org.apache.distributedlog.exceptions.BKTransmitException;
import org.apache.distributedlog.exceptions.EndOfStreamException;
import org.apache.distributedlog.exceptions.WriteCancelledException;
import org.apache.distributedlog.exceptions.WriteException;
import org.apache.distributedlog.exceptions.ZKException;
import org.apache.distributedlog.impl.BKNamespaceDriver;
import org.apache.distributedlog.impl.logsegment.BKLogSegmentEntryWriter;
import org.apache.distributedlog.impl.metadata.BKDLConfig;
import org.apache.distributedlog.lock.SessionLockFactory;
import org.apache.distributedlog.lock.ZKDistributedLock;
import org.apache.distributedlog.lock.ZKSessionLockFactory;
import org.apache.distributedlog.util.ConfUtils;
import org.apache.distributedlog.util.OrderedScheduler;
import org.apache.distributedlog.util.Utils;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
/**
* Test Case for BookKeeper Based Log Segment Writer.
*/
public class TestBKLogSegmentWriter extends TestDistributedLogBase {
@Rule
public TestName runtime = new TestName();
private OrderedScheduler scheduler;
private OrderedScheduler lockStateExecutor;
private ZooKeeperClient zkc;
private ZooKeeperClient zkc0;
private BookKeeperClient bkc;
@Before
@Override
public void setup() throws Exception {
    super.setup();
    // Single-threaded ordered schedulers: one for the writer's future pool,
    // one dedicated to distributed-lock state transitions.
    scheduler = OrderedScheduler.newBuilder().corePoolSize(1).build();
    lockStateExecutor = OrderedScheduler.newBuilder().corePoolSize(1).build();
    // build zookeeper client
    URI uri = createDLMURI("");
    zkc = TestZooKeeperClientBuilder.newBuilder(conf)
            .name("test-zkc")
            .uri(uri)
            .build();
    // Second ZK client with its own session; tests use it to compete for the
    // writer's lock and thereby observe when the writer releases it.
    zkc0 = TestZooKeeperClientBuilder.newBuilder(conf)
            .name("test-zkc0")
            .uri(uri)
            .build();
    // build bookkeeper client
    BKDLConfig bkdlConfig = BKDLConfig.resolveDLConfig(zkc, uri);
    bkc = BookKeeperClientBuilder.newBuilder()
            .dlConfig(conf)
            .name("test-bkc")
            .ledgersPath(bkdlConfig.getBkLedgersPath())
            .zkServers(BKNamespaceDriver.getZKServersFromDLUri(uri))
            .build();
}
@After
@Override
public void teardown() throws Exception {
    // Release resources in reverse order of creation. Each close is
    // null-guarded so a failed setup does not NPE during teardown.
    if (null != bkc) {
        bkc.close();
    }
    if (null != zkc) {
        zkc.close();
    }
    if (null != lockStateExecutor) {
        lockStateExecutor.shutdown();
    }
    if (null != scheduler) {
        scheduler.shutdown();
    }
    super.teardown();
}
// Returns a private copy of the shared test configuration that individual
// tests may mutate without affecting one another.
private DistributedLogConfiguration newLocalConf() {
    final DistributedLogConfiguration localConf = new DistributedLogConfiguration();
    localConf.addConfiguration(conf);
    return localConf;
}
/**
 * Creates a ZK distributed lock rooted at {@code path}, creating the lock
 * znode (full path, optimistic) when it does not already exist.
 *
 * @param path        lock znode path
 * @param zkClient    zookeeper client whose session owns the lock
 * @param acquireLock when true, block until the lock is acquired
 * @return the lock (already acquired when {@code acquireLock} is true)
 */
private ZKDistributedLock createLock(String path,
                                     ZooKeeperClient zkClient,
                                     boolean acquireLock)
        throws Exception {
    try {
        Utils.ioResult(Utils.zkAsyncCreateFullPathOptimistic(zkClient, path, new byte[0],
                ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT));
    } catch (ZKException zke) {
        // node already exists
    }
    SessionLockFactory lockFactory = new ZKSessionLockFactory(
            zkClient,
            "test-lock",
            lockStateExecutor,
            0,                      // retries
            Long.MAX_VALUE,         // lock-op timeout: effectively never
            conf.getZKSessionTimeoutMilliseconds(),
            NullStatsLogger.INSTANCE
    );
    ZKDistributedLock lock = new ZKDistributedLock(
            lockStateExecutor,
            lockFactory,
            path,
            Long.MAX_VALUE,         // lock timeout: effectively never
            NullStatsLogger.INSTANCE);
    if (acquireLock) {
        return Utils.ioResult(lock.asyncAcquire());
    } else {
        return lock;
    }
}
// Closes the writer (flushing per its configuration) and always releases
// the lock, even when the close itself throws.
private void closeWriterAndLock(BKLogSegmentWriter segmentWriter,
                                ZKDistributedLock heldLock)
        throws Exception {
    try {
        Utils.ioResult(segmentWriter.asyncClose());
    } finally {
        Utils.closeQuietly(heldLock);
    }
}
// Aborts the writer (discarding buffered data) and always releases the
// lock, even when the abort throws.
private void abortWriterAndLock(BKLogSegmentWriter segmentWriter,
                                ZKDistributedLock heldLock)
        throws IOException {
    try {
        Utils.abort(segmentWriter, false);
    } finally {
        Utils.closeQuietly(heldLock);
    }
}
// Creates a log segment writer backed by a fresh BookKeeper ledger
// (ensemble 3, write quorum 2, ack quorum 2), using the test-method name
// for the writer's identifiers and no-op stats/permits/features.
private BKLogSegmentWriter createLogSegmentWriter(DistributedLogConfiguration conf,
                                                  long logSegmentSequenceNumber,
                                                  long startTxId,
                                                  ZKDistributedLock lock) throws Exception {
    LedgerHandle lh = bkc.get().createLedger(3, 2, 2,
            BookKeeper.DigestType.CRC32, conf.getBKDigestPW().getBytes(UTF_8));
    return new BKLogSegmentWriter(
            runtime.getMethodName(),
            runtime.getMethodName(),
            conf,
            LogSegmentMetadata.LEDGER_METADATA_CURRENT_LAYOUT_VERSION,
            new BKLogSegmentEntryWriter(lh),
            lock,
            startTxId,
            logSegmentSequenceNumber,
            scheduler,
            NullStatsLogger.INSTANCE,
            NullStatsLogger.INSTANCE,
            new AlertStatsLogger(NullStatsLogger.INSTANCE, "test"),
            PermitLimiter.NULL_PERMIT_LIMITER,
            new SettableFeatureProvider("", 0),
            ConfUtils.getConstDynConf(conf));
}
// Opens a read-only view of the ledger WITHOUT triggering recovery, so the
// writer's state (open/closed, last-add-confirmed) is observed as-is.
private LedgerHandle openLedgerNoRecovery(LedgerHandle lh) throws Exception {
    final byte[] digestPasswd = conf.getBKDigestPW().getBytes(UTF_8);
    return bkc.get().openLedgerNoRecovery(lh.getId(), BookKeeper.DigestType.CRC32, digestPasswd);
}
// Opens the ledger WITH recovery; as a side effect this fences the ledger
// against its original writer (see fenceLedger, which relies on this).
private LedgerHandle openLedger(LedgerHandle lh) throws Exception {
    final byte[] digestPasswd = conf.getBKDigestPW().getBytes(UTF_8);
    return bkc.get().openLedger(lh.getId(), BookKeeper.DigestType.CRC32, digestPasswd);
}
// Fences the ledger against its original writer by opening it with
// recovery; the recovered handle is intentionally discarded.
private void fenceLedger(LedgerHandle handleToFence) throws Exception {
    final byte[] digestPasswd = conf.getBKDigestPW().getBytes(UTF_8);
    bkc.get().openLedger(handleToFence.getId(), BookKeeper.DigestType.CRC32, digestPasswd);
}
/**
* Close a segment log writer should flush buffered data.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testCloseShouldFlush() throws Exception {
    // Disable all automatic flushing so records stay buffered until close.
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // Use another lock to wait for writer releasing lock
    ZKDistributedLock lock0 = createLock("/test/lock-" + runtime.getMethodName(), zkc0, false);
    CompletableFuture<ZKDistributedLock> lockFuture0 = lock0.asyncAcquire();
    // add 10 records
    int numRecords = 10;
    List<CompletableFuture<DLSN>> futureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        futureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    // Buffered only: tx ids/position advance but nothing is acknowledged yet.
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should be " + numRecords,
            10, writer.getPositionWithinLogSegment());
    // close the writer should flush buffered data and release lock
    closeWriterAndLock(writer, lock);
    Utils.ioResult(lockFuture0);
    lock0.checkOwnership();
    assertEquals("Last tx id should still be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should become " + (numRecords - 1),
            numRecords - 1, writer.getLastTxIdAcknowledged());
    assertEquals("Position should still be " + numRecords,
            10, writer.getPositionWithinLogSegment());
    List<DLSN> dlsns = Utils.ioResult(FutureUtils.collect(futureList));
    assertEquals("All records should be written",
            numRecords, dlsns.size());
    // All ten records were batched into a single BK entry (entry 0), so they
    // share the entry id and differ only by slot.
    for (int i = 0; i < numRecords; i++) {
        DLSN dlsn = dlsns.get(i);
        assertEquals("Incorrent ledger sequence number",
                0L, dlsn.getLogSegmentSequenceNo());
        assertEquals("Incorrent entry id",
                0L, dlsn.getEntryId());
        assertEquals("Inconsistent slot id",
                i, dlsn.getSlotId());
    }
    assertEquals("Last DLSN should be " + dlsns.get(dlsns.size() - 1),
            dlsns.get(dlsns.size() - 1), writer.getLastDLSN());
    LedgerHandle lh = getLedgerHandle(writer);
    LedgerHandle readLh = openLedgerNoRecovery(lh);
    assertTrue("Ledger " + lh.getId() + " should be closed", readLh.isClosed());
    // LAC == 1 -> two entries: the data entry plus one more written on close
    // (presumably a control/commit record — confirm against the writer impl).
    assertEquals("There should be two entries in ledger " + lh.getId(),
            1L, readLh.getLastAddConfirmed());
}
/**
* Abort a segment log writer should just abort pending writes and not flush buffered data.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testAbortShouldNotFlush() throws Exception {
    // Buffer everything (no immediate/periodic flush) so records stay in the
    // output buffer until the writer is flushed or aborted.
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // Use another lock to wait for writer releasing lock
    ZKDistributedLock lock0 = createLock("/test/lock-" + runtime.getMethodName(), zkc0, false);
    CompletableFuture<ZKDistributedLock> lockFuture0 = lock0.asyncAcquire();
    // add 10 records; none should be transmitted yet
    int numRecords = 10;
    List<CompletableFuture<DLSN>> futureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        futureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    // abort the writer: buffered data must be cancelled (not flushed) and
    // the lock released
    abortWriterAndLock(writer, lock);
    Utils.ioResult(lockFuture0);
    lock0.checkOwnership();
    assertEquals("Last tx id should still be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    // FIX: message previously claimed (numRecords - 1) while asserting -1L.
    assertEquals("Last acked tx id should still be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should still be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should still be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    for (int i = 0; i < numRecords; i++) {
        try {
            Utils.ioResult(futureList.get(i));
            fail("Should be aborted record " + i + " with transmit exception");
        } catch (WriteCancelledException wce) {
            // FIX: message said "because of ledger fenced", but the abort path
            // cancels writes with an interrupted transmit, not a fenced ledger
            // (the asserted code below is InterruptedException).
            assertTrue("Record " + i + " should be cancelled with a transmit exception",
                    wce.getCause() instanceof BKTransmitException);
            BKTransmitException bkte = (BKTransmitException) wce.getCause();
            assertEquals("Record " + i + " should be aborted",
                    BKException.Code.InterruptedException, bkte.getBKResultCode());
        }
    }
    // check no entries were written
    LedgerHandle lh = getLedgerHandle(writer);
    LedgerHandle readLh = openLedgerNoRecovery(lh);
    // FIX: message said "should not be closed" while asserting isClosed();
    // aborting the writer closes the (empty) ledger.
    assertTrue("Ledger " + lh.getId() + " should be closed", readLh.isClosed());
    assertEquals("There should be no entries in ledger " + lh.getId(),
            LedgerHandle.INVALID_ENTRY_ID, readLh.getLastAddConfirmed());
}
/**
* Close a log segment writer that already detect ledger fenced, should not flush buffered data.
* And should throw exception on closing.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testCloseShouldNotFlushIfLedgerFenced() throws Exception {
    // A fenced ledger is one instance of an error state; delegate to the
    // generic error-state scenario with the fenced result code.
    testCloseShouldNotFlushIfInErrorState(BKException.Code.LedgerFencedException);
}
/**
* Close a log segment writer that is already in error state, should not flush buffered data.
*
* @throws Exception
*/
// Drives a writer into the given transmit-error state, then verifies that
// close throws that code, releases the lock, cancels all buffered writes,
// and leaves the ledger un-closed with no entries.
void testCloseShouldNotFlushIfInErrorState(int rcToFailComplete) throws Exception {
    // Buffer all records so nothing is transmitted before close.
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // Use another lock to wait for writer releasing lock
    ZKDistributedLock lock0 = createLock("/test/lock-" + runtime.getMethodName(), zkc0, false);
    CompletableFuture<ZKDistributedLock> lockFuture0 = lock0.asyncAcquire();
    // add 10 records
    int numRecords = 10;
    List<CompletableFuture<DLSN>> futureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        futureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    // Force the writer into the error state under test.
    writer.setTransmitResult(rcToFailComplete);
    // close the writer should release lock but not flush data
    try {
        closeWriterAndLock(writer, lock);
        fail("Close a log segment writer in error state should throw exception");
    } catch (BKTransmitException bkte) {
        assertEquals("Inconsistent rc is thrown",
                rcToFailComplete, bkte.getBKResultCode());
    }
    Utils.ioResult(lockFuture0);
    lock0.checkOwnership();
    assertEquals("Last tx id should still be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    // FIX: message previously claimed (numRecords - 1) while asserting -1L.
    assertEquals("Last acked tx id should still be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should still be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should still be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    for (int i = 0; i < numRecords; i++) {
        try {
            Utils.ioResult(futureList.get(i));
            fail("Should be aborted record " + i + " with transmit exception");
        } catch (WriteCancelledException wce) {
            // FIX: message hard-coded "ledger fenced" although the error code
            // under test is parameterized.
            assertTrue("Record " + i + " should be cancelled with a transmit exception",
                    wce.getCause() instanceof BKTransmitException);
            BKTransmitException bkte = (BKTransmitException) wce.getCause();
            assertEquals("Record " + i + " should be aborted",
                    rcToFailComplete, bkte.getBKResultCode());
        }
    }
    // check no entries were written
    LedgerHandle lh = getLedgerHandle(writer);
    LedgerHandle readLh = openLedgerNoRecovery(lh);
    assertFalse("Ledger " + lh.getId() + " should not be closed", readLh.isClosed());
    assertEquals("There should be no entries in ledger " + lh.getId(),
            LedgerHandle.INVALID_ENTRY_ID, readLh.getLastAddConfirmed());
}
/**
* Close the writer when ledger is fenced: it should release the lock, fail on flushing data and throw exception.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testCloseShouldFailIfLedgerFenced() throws Exception {
    // Buffer all records so the flush happens only on close — after fencing.
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // Use another lock to wait for writer releasing lock
    ZKDistributedLock lock0 = createLock("/test/lock-" + runtime.getMethodName(), zkc0, false);
    CompletableFuture<ZKDistributedLock> lockFuture0 = lock0.asyncAcquire();
    // add 10 records
    int numRecords = 10;
    List<CompletableFuture<DLSN>> futureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        futureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    // fence the ledger
    fenceLedger(getLedgerHandle(writer));
    // close the writer: it should release the lock, fail on flushing data and throw exception
    try {
        closeWriterAndLock(writer, lock);
        fail("Close a log segment writer when ledger is fenced should throw exception");
    } catch (BKTransmitException bkte) {
        assertEquals("Inconsistent rc is thrown",
                BKException.Code.LedgerFencedException, bkte.getBKResultCode());
    }
    Utils.ioResult(lockFuture0);
    lock0.checkOwnership();
    assertEquals("Last tx id should still be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    // FIX: message previously claimed (numRecords - 1) while asserting -1L.
    assertEquals("Last acked tx id should still be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should still be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should still be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    for (int i = 0; i < numRecords; i++) {
        try {
            Utils.ioResult(futureList.get(i));
            fail("Should be aborted record " + i + " with transmit exception");
        } catch (BKTransmitException bkte) {
            assertEquals("Record " + i + " should be aborted",
                    BKException.Code.LedgerFencedException, bkte.getBKResultCode());
        }
    }
    // check no entries were written (fencing closed the ledger empty)
    LedgerHandle lh = getLedgerHandle(writer);
    LedgerHandle readLh = openLedgerNoRecovery(lh);
    assertTrue("Ledger " + lh.getId() + " should be closed", readLh.isClosed());
    assertEquals("There should be no entries in ledger " + lh.getId(),
            LedgerHandle.INVALID_ENTRY_ID, readLh.getLastAddConfirmed());
}
/**
* Abort should wait for outstanding transmits to be completed and cancel buffered data.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testAbortShouldFailAllWrites() throws Exception {
    // Buffer everything; flushing is driven explicitly by the test.
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // Use another lock to wait for writer releasing lock
    ZKDistributedLock lock0 = createLock("/test/lock-" + runtime.getMethodName(), zkc0, false);
    CompletableFuture<ZKDistributedLock> lockFuture0 = lock0.asyncAcquire();
    // add 10 records
    int numRecords = 10;
    List<CompletableFuture<DLSN>> futureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        futureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be -1",
            -1L, writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    // Block the writer's ordered future pool so transmit completions are
    // deferred until the latch is released.
    final CountDownLatch deferLatch = new CountDownLatch(1);
    writer.getFuturePool().submit(() -> {
        try {
            deferLatch.await();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
            LOG.warn("Interrupted on deferring completion : ", e);
        }
    });
    // transmit the buffered data
    Utils.ioResult(writer.flush());
    // add another 10 records; these stay buffered and must be cancelled by abort
    List<CompletableFuture<DLSN>> anotherFutureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = numRecords; i < 2 * numRecords; i++) {
        anotherFutureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    assertEquals("Last tx id should become " + (2 * numRecords - 1),
            2 * numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should become " + (numRecords - 1),
            (long) (numRecords - 1), writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should still be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    assertEquals("Position should become " + (2 * numRecords),
            2 * numRecords, writer.getPositionWithinLogSegment());
    // abort the writer: it waits for outstanding transmits and abort buffered data
    abortWriterAndLock(writer, lock);
    Utils.ioResult(lockFuture0);
    lock0.checkOwnership();
    // release defer latch so completion would go through
    deferLatch.countDown();
    List<DLSN> dlsns = Utils.ioResult(FutureUtils.collect(futureList));
    assertEquals("All first 10 records should be written",
            numRecords, dlsns.size());
    for (int i = 0; i < numRecords; i++) {
        DLSN dlsn = dlsns.get(i);
        // FIX: "Incorrent" typos in the assertion messages.
        assertEquals("Incorrect ledger sequence number",
                0L, dlsn.getLogSegmentSequenceNo());
        assertEquals("Incorrect entry id",
                0L, dlsn.getEntryId());
        assertEquals("Inconsistent slot id",
                i, dlsn.getSlotId());
    }
    for (int i = 0; i < numRecords; i++) {
        try {
            Utils.ioResult(anotherFutureList.get(i));
            fail("Should be aborted record " + (numRecords + i) + " with transmit exception");
        } catch (WriteCancelledException wce) {
            // writes should be cancelled.
        }
    }
    assertEquals("Last tx id should still be " + (2 * numRecords - 1),
            2 * numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be still " + (numRecords - 1),
            (long) (numRecords - 1), writer.getLastTxIdAcknowledged());
    assertEquals("Last DLSN should become " + futureList.get(futureList.size() - 1),
            dlsns.get(futureList.size() - 1), writer.getLastDLSN());
    assertEquals("Position should become " + 2 * numRecords,
            2 * numRecords, writer.getPositionWithinLogSegment());
    // check only 1 entry were written
    LedgerHandle lh = getLedgerHandle(writer);
    LedgerHandle readLh = openLedgerNoRecovery(lh);
    // FIX: message said "should not be closed" while asserting isClosed().
    assertTrue("Ledger " + lh.getId() + " should be closed", readLh.isClosed());
    assertEquals("Only one entry is written for ledger " + lh.getId(),
            0L, lh.getLastAddPushed());
    assertEquals("Only one entry is written for ledger " + lh.getId(),
            0L, readLh.getLastAddConfirmed());
}
/**
* Log Segment Writer should only update last tx id only for user records.
*/
@Test(timeout = 60000)
public void testUpdateLastTxIdForUserRecords() throws Exception {
    // Buffer everything; the close below performs the only flush.
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // add 10 records
    int numRecords = 10;
    List<CompletableFuture<DLSN>> futureList = new ArrayList<CompletableFuture<DLSN>>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        futureList.add(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(i)));
    }
    // A control record with a much larger tx id (9999) must NOT advance the
    // writer's last tx id — only user records count.
    LogRecord controlRecord = DLMTestUtil.getLogRecordInstance(9999L);
    controlRecord.setControl();
    futureList.add(writer.asyncWrite(controlRecord));
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last DLSN should be " + DLSN.InvalidDLSN,
            DLSN.InvalidDLSN, writer.getLastDLSN());
    // Position counts user records only; the control record is excluded.
    assertEquals("Position should be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    // close the writer to flush the output buffer
    closeWriterAndLock(writer, lock);
    List<DLSN> dlsns = Utils.ioResult(FutureUtils.collect(futureList));
    assertEquals("All 11 records should be written",
            numRecords + 1, dlsns.size());
    // The 10 user records share entry 0, differing only by slot.
    for (int i = 0; i < numRecords; i++) {
        DLSN dlsn = dlsns.get(i);
        assertEquals("Incorrent ledger sequence number",
                0L, dlsn.getLogSegmentSequenceNo());
        assertEquals("Incorrent entry id",
                0L, dlsn.getEntryId());
        assertEquals("Inconsistent slot id",
                i, dlsn.getSlotId());
    }
    // The control record lands in its own entry (entry 1, slot 0).
    DLSN dlsn = dlsns.get(numRecords);
    assertEquals("Incorrent ledger sequence number",
            0L, dlsn.getLogSegmentSequenceNo());
    assertEquals("Incorrent entry id",
            1L, dlsn.getEntryId());
    assertEquals("Inconsistent slot id",
            0L, dlsn.getSlotId());
    assertEquals("Last tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxId());
    assertEquals("Last acked tx id should be " + (numRecords - 1),
            numRecords - 1, writer.getLastTxIdAcknowledged());
    assertEquals("Position should be " + numRecords,
            numRecords, writer.getPositionWithinLogSegment());
    // Last DLSN tracks the last USER record, not the control record.
    assertEquals("Last DLSN should be " + dlsn,
            dlsns.get(numRecords - 1), writer.getLastDLSN());
}
/**
* Non durable write should fail if writer is closed.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testNondurableWriteAfterWriterIsClosed() throws Exception {
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    // Non-durable mode: writes are acknowledged without waiting on BookKeeper.
    confLocal.setDurableWriteEnabled(false);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // close the writer
    closeWriterAndLock(writer, lock);
    // NOTE(review): this is a second close of an already-closed writer —
    // presumably exercising close idempotency; confirm it is intentional.
    Utils.ioResult(writer.asyncClose());
    try {
        Utils.ioResult(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(1)));
        fail("Should fail the write if the writer is closed");
    } catch (WriteException we) {
        // expected
    }
}
/**
* Non durable write should fail if writer is marked as end of stream.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testNondurableWriteAfterEndOfStream() throws Exception {
    // Non-durable configuration with all automatic flushing disabled.
    final DistributedLogConfiguration localConf = newLocalConf();
    localConf.setImmediateFlushEnabled(false);
    localConf.setOutputBufferSize(Integer.MAX_VALUE);
    localConf.setPeriodicFlushFrequencyMilliSeconds(0);
    localConf.setDurableWriteEnabled(false);

    final ZKDistributedLock lock =
            createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    final BKLogSegmentWriter writer = createLogSegmentWriter(localConf, 0L, -1L, lock);

    // Seal the stream, then verify any further write is rejected.
    Utils.ioResult(writer.markEndOfStream());
    try {
        Utils.ioResult(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(1)));
        fail("Should fail the write if the writer is marked as end of stream");
    } catch (EndOfStreamException eos) {
        // expected
    }
    closeWriterAndLock(writer, lock);
}
/**
* Non durable write should fail if the log segment is fenced.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testNondurableWriteAfterLedgerIsFenced() throws Exception {
    DistributedLogConfiguration confLocal = newLocalConf();
    confLocal.setImmediateFlushEnabled(false);
    confLocal.setOutputBufferSize(Integer.MAX_VALUE);
    confLocal.setPeriodicFlushFrequencyMilliSeconds(0);
    // Non-durable mode: writes are acknowledged without waiting on BookKeeper.
    confLocal.setDurableWriteEnabled(false);
    ZKDistributedLock lock = createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    BKLogSegmentWriter writer =
            createLogSegmentWriter(confLocal, 0L, -1L, lock);
    // fence the ledger
    fenceLedger(getLedgerHandle(writer));
    // A control record triggers an actual transmit, surfacing the fenced
    // ledger as a BKTransmitException.
    LogRecord record = DLMTestUtil.getLogRecordInstance(1);
    record.setControl();
    try {
        Utils.ioResult(writer.asyncWrite(record));
        fail("Should fail the writer if the log segment is already fenced");
    } catch (BKTransmitException bkte) {
        // expected
        assertEquals(BKException.Code.LedgerFencedException, bkte.getBKResultCode());
    }
    // Once in the error state, subsequent user writes fail fast.
    try {
        Utils.ioResult(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(2)));
        fail("Should fail the writer if the log segment is already fenced");
    } catch (WriteException we) {
        // expected
    }
    abortWriterAndLock(writer, lock);
}
/**
* Non durable write should fail if writer is marked as end of stream.
*
* @throws Exception
*/
@Test(timeout = 60000)
public void testNondurableWrite() throws Exception {
    // With durable writes disabled, asyncWrite acks immediately with an
    // invalid DLSN and nothing is pushed to the BookKeeper ledger.
    final DistributedLogConfiguration localConf = newLocalConf();
    localConf.setImmediateFlushEnabled(false);
    localConf.setOutputBufferSize(Integer.MAX_VALUE);
    localConf.setPeriodicFlushFrequencyMilliSeconds(0);
    localConf.setDurableWriteEnabled(false);

    final ZKDistributedLock lock =
            createLock("/test/lock-" + runtime.getMethodName(), zkc, true);
    final BKLogSegmentWriter writer = createLogSegmentWriter(localConf, 0L, -1L, lock);

    assertEquals(DLSN.InvalidDLSN,
            Utils.ioResult(writer.asyncWrite(DLMTestUtil.getLogRecordInstance(2))));
    // Nothing was transmitted to the ledger.
    assertEquals(-1L,
            ((BKLogSegmentEntryWriter) writer.getEntryWriter())
                    .getLedgerHandle().getLastAddPushed());

    closeWriterAndLock(writer, lock);
}
}
|
apache/doris-flink-connector | 35,073 | flink-doris-connector/src/test/java/org/apache/doris/flink/sink/DorisSinkITCase.java | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.flink.sink;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.runtime.minicluster.RpcServiceSharing;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.util.StringUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.doris.flink.catalog.doris.DataModel;
import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisReadOptions;
import org.apache.doris.flink.container.AbstractITCaseService;
import org.apache.doris.flink.container.ContainerUtils;
import org.apache.doris.flink.sink.DorisSink.Builder;
import org.apache.doris.flink.sink.batch.DorisBatchSink;
import org.apache.doris.flink.sink.writer.serializer.SimpleStringSerializer;
import org.apache.doris.flink.table.DorisConfigOptions;
import org.apache.doris.flink.utils.MockSource;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import static org.apache.flink.api.common.JobStatus.FINISHED;
import static org.apache.flink.api.common.JobStatus.RUNNING;
/** DorisSink ITCase with csv and arrow format. */
@RunWith(Parameterized.class)
public class DorisSinkITCase extends AbstractITCaseService {
private static final Logger LOG = LoggerFactory.getLogger(DorisSinkITCase.class);
// Target Doris database plus one dedicated table per test case, so tests
// can run independently without clobbering each other's data.
static final String DATABASE = "test_sink";
static final String TABLE_CSV = "tbl_csv";
static final String TABLE_JSON = "tbl_json";
static final String TABLE_JSON_TBL = "tbl_json_tbl";
static final String TABLE_TBL_AUTO_REDIRECT = "tbl_tbl_auto_redirect";
static final String TABLE_CSV_BATCH_TBL = "tbl_csv_batch_tbl";
static final String TABLE_CSV_BATCH_DS = "tbl_csv_batch_DS";
static final String TABLE_GROUP_COMMIT = "tbl_group_commit";
static final String TABLE_OVERWRITE = "tbl_overwrite";
static final String TABLE_GZ_FORMAT = "tbl_gz_format";
static final String TABLE_CSV_JM = "tbl_csv_jm";
static final String TABLE_CSV_TM = "tbl_csv_tm";
private final boolean batchMode;
public DorisSinkITCase(boolean batchMode) {
this.batchMode = batchMode;
}
@Parameterized.Parameters(name = "batchMode: {0}")
public static Object[] parameters() {
return new Object[][] {new Object[] {false}, new Object[] {true}};
}
@Rule
public final MiniClusterWithClientResource miniClusterResource =
new MiniClusterWithClientResource(
new MiniClusterResourceConfiguration.Builder()
.setNumberTaskManagers(1)
.setNumberSlotsPerTaskManager(2)
.setRpcServiceSharing(RpcServiceSharing.DEDICATED)
.withHaLeadershipControl()
.build());
@Test
public void testSinkCsvFormat() throws Exception {
initializeTable(TABLE_CSV, DataModel.UNIQUE);
Properties properties = new Properties();
properties.setProperty("column_separator", ",");
properties.setProperty("line_delimiter", "\n");
properties.setProperty("format", "csv");
DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
executionBuilder
.setLabelPrefix(UUID.randomUUID().toString())
.setStreamLoadProp(properties)
.setDeletable(false)
.setBufferCount(4)
.setBufferSize(5 * 1024 * 1024)
.setBatchMode(batchMode);
DorisOptions.Builder dorisBuilder = DorisOptions.builder();
dorisBuilder
.setFenodes(getFenodes())
.setTableIdentifier(DATABASE + "." + TABLE_CSV)
.setUsername(getDorisUsername())
.setPassword(getDorisPassword());
submitJob(dorisBuilder.build(), executionBuilder.build(), new String[] {"doris,1"});
Thread.sleep(10000);
List<String> expected = Arrays.asList("doris,1");
String query = String.format("select name,age from %s.%s order by 1", DATABASE, TABLE_CSV);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testSinkJsonFormat() throws Exception {
initializeTable(TABLE_JSON, DataModel.UNIQUE);
Properties properties = new Properties();
properties.setProperty("read_json_by_line", "true");
properties.setProperty("format", "json");
// mock data
Map<String, Object> row1 = new HashMap<>();
row1.put("name", "doris1");
row1.put("age", 1);
Map<String, Object> row2 = new HashMap<>();
row2.put("name", "doris2");
row2.put("age", 2);
DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
executionBuilder
.setLabelPrefix(UUID.randomUUID().toString())
.setBatchMode(batchMode)
.setStreamLoadProp(properties)
// uniq need to be false
.setDeletable(false);
DorisOptions.Builder dorisBuilder = DorisOptions.builder();
dorisBuilder
.setFenodes(getFenodes())
.setTableIdentifier(DATABASE + "." + TABLE_JSON)
.setUsername(getDorisUsername())
.setPassword(getDorisPassword());
submitJob(
dorisBuilder.build(),
executionBuilder.build(),
new String[] {
new ObjectMapper().writeValueAsString(row1),
new ObjectMapper().writeValueAsString(row2)
});
Thread.sleep(10000);
List<String> expected = Arrays.asList("doris1,1", "doris2,2");
String query = String.format("select name,age from %s.%s order by 1", DATABASE, TABLE_JSON);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
private void submitJob(
DorisOptions dorisOptions, DorisExecutionOptions executionOptions, String[] records)
throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
env.setParallelism(DEFAULT_PARALLELISM);
Builder<String> builder = DorisSink.builder();
final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder();
builder.setDorisReadOptions(readOptionBuilder.build())
.setDorisExecutionOptions(executionOptions)
.setSerializer(new SimpleStringSerializer())
.setDorisOptions(dorisOptions);
env.fromElements(records).sinkTo(builder.build());
env.execute();
}
@Test
public void testTableSinkJsonFormat() throws Exception {
initializeTable(TABLE_JSON_TBL, DataModel.DUPLICATE);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
String sinkDDL =
String.format(
"CREATE TABLE doris_sink ("
+ " name STRING,"
+ " age INT"
+ ") WITH ("
+ " 'connector' = '"
+ DorisConfigOptions.IDENTIFIER
+ "',"
+ " 'fenodes' = '%s',"
+ " 'table.identifier' = '%s',"
+ " 'username' = '%s',"
+ " 'password' = '%s',"
+ " 'sink.buffer-size' = '1MB',"
+ " 'sink.buffer-count' = '3',"
+ " 'sink.max-retries' = '1',"
+ " 'sink.enable-2pc' = 'true',"
+ " 'sink.use-cache' = 'true',"
+ " 'sink.enable-delete' = 'false',"
+ " 'sink.enable.batch-mode' = '%s',"
+ " 'sink.ignore.update-before' = 'true',"
+ " 'sink.properties.format' = 'json',"
+ " 'sink.properties.read_json_by_line' = 'true',"
+ " 'sink.label-prefix' = 'doris_sink"
+ UUID.randomUUID()
+ "'"
+ ")",
getFenodes(),
DATABASE + "." + TABLE_JSON_TBL,
getDorisUsername(),
getDorisPassword(),
batchMode);
tEnv.executeSql(sinkDDL);
tEnv.executeSql("INSERT INTO doris_sink SELECT 'doris',1 union all SELECT 'flink',2");
Thread.sleep(10000);
List<String> expected = Arrays.asList("doris,1", "flink,2");
String query =
String.format("select name,age from %s.%s order by 1", DATABASE, TABLE_JSON_TBL);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testTableSinkAutoRedirectFalse() throws Exception {
if (StringUtils.isNullOrWhitespaceOnly(getBenodes())) {
LOG.info("benodes is empty, skip the test.");
return;
}
initializeTable(TABLE_TBL_AUTO_REDIRECT, DataModel.AGGREGATE);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
String sinkDDL =
String.format(
"CREATE TABLE doris_sink ("
+ " name STRING,"
+ " age INT"
+ ") WITH ("
+ " 'connector' = '"
+ DorisConfigOptions.IDENTIFIER
+ "',"
+ " 'fenodes' = '%s',"
+ " 'benodes' = '%s',"
+ " 'auto-redirect' = 'false',"
+ " 'table.identifier' = '%s',"
+ " 'username' = '%s',"
+ " 'password' = '%s',"
+ " 'sink.enable.batch-mode' = '%s',"
+ " 'sink.label-prefix' = 'doris_sink"
+ UUID.randomUUID()
+ "'"
+ ")",
getFenodes(),
getBenodes(),
DATABASE + "." + TABLE_TBL_AUTO_REDIRECT,
getDorisUsername(),
getDorisPassword(),
batchMode);
tEnv.executeSql(sinkDDL);
tEnv.executeSql("INSERT INTO doris_sink SELECT 'doris',1 union all SELECT 'flink',2");
Thread.sleep(10000);
List<String> expected = Arrays.asList("doris,1", "flink,2");
String query =
String.format("select name,age from %s.%s order by 1", DATABASE, TABLE_JSON_TBL);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testTableBatch() throws Exception {
initializeTable(TABLE_CSV_BATCH_TBL, DataModel.UNIQUE_MOR);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
String sinkDDL =
String.format(
"CREATE TABLE doris_sink_batch ("
+ " name STRING,"
+ " age INT"
+ ") WITH ("
+ " 'connector' = '"
+ DorisConfigOptions.IDENTIFIER
+ "',"
+ " 'fenodes' = '%s',"
+ " 'table.identifier' = '%s',"
+ " 'username' = '%s',"
+ " 'password' = '%s',"
+ " 'sink.label-prefix' = '"
+ UUID.randomUUID()
+ "',"
+ " 'sink.properties.column_separator' = '\\x01',"
+ " 'sink.properties.line_delimiter' = '\\x02',"
+ " 'sink.ignore.update-before' = 'false',"
+ " 'sink.enable.batch-mode' = '%s',"
+ " 'sink.enable-delete' = 'true',"
+ " 'sink.flush.queue-size' = '2',"
+ " 'sink.buffer-flush.max-rows' = '10000',"
+ " 'sink.buffer-flush.max-bytes' = '10MB',"
+ " 'sink.buffer-flush.interval' = '1s'"
+ ")",
getFenodes(),
DATABASE + "." + TABLE_CSV_BATCH_TBL,
getDorisUsername(),
getDorisPassword(),
batchMode);
tEnv.executeSql(sinkDDL);
tEnv.executeSql("INSERT INTO doris_sink_batch SELECT 'doris',1 union all SELECT 'flink',2");
Thread.sleep(20000);
List<String> expected = Arrays.asList("doris,1", "flink,2");
String query =
String.format(
"select name,age from %s.%s order by 1", DATABASE, TABLE_CSV_BATCH_TBL);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testDataStreamBatch() throws Exception {
initializeTable(TABLE_CSV_BATCH_DS, DataModel.AGGREGATE);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
env.setParallelism(DEFAULT_PARALLELISM);
DorisBatchSink.Builder<String> builder = DorisBatchSink.builder();
DorisOptions.Builder dorisBuilder = DorisOptions.builder();
dorisBuilder
.setFenodes(getFenodes())
.setTableIdentifier(DATABASE + "." + TABLE_CSV_BATCH_DS)
.setUsername(getDorisUsername())
.setPassword(getDorisPassword());
Properties properties = new Properties();
properties.setProperty("column_separator", ",");
properties.setProperty("line_delimiter", "\n");
properties.setProperty("format", "csv");
DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
executionBuilder
.setLabelPrefix(UUID.randomUUID().toString())
.setFlushQueueSize(3)
.setBatchMode(batchMode)
.setStreamLoadProp(properties)
.setBufferFlushMaxBytes(10485760)
.setBufferFlushMaxRows(10000)
.setBufferFlushIntervalMs(1000);
builder.setDorisExecutionOptions(executionBuilder.build())
.setSerializer(new SimpleStringSerializer())
.setDorisOptions(dorisBuilder.build());
env.fromElements("doris,1", "flink,2").sinkTo(builder.build());
env.execute();
Thread.sleep(20000);
List<String> expected = Arrays.asList("doris,1", "flink,2");
String query =
String.format(
"select name,age from %s.%s order by 1", DATABASE, TABLE_CSV_BATCH_DS);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testTableGroupCommit() throws Exception {
initializeTable(TABLE_GROUP_COMMIT, DataModel.DUPLICATE);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
String sinkDDL =
String.format(
"CREATE TABLE doris_group_commit_sink ("
+ " name STRING,"
+ " age INT"
+ ") WITH ("
+ " 'connector' = '"
+ DorisConfigOptions.IDENTIFIER
+ "',"
+ " 'fenodes' = '%s',"
+ " 'table.identifier' = '%s',"
+ " 'username' = '%s',"
+ " 'password' = '%s',"
+ " 'sink.label-prefix' = '"
+ UUID.randomUUID()
+ "',"
+ " 'sink.properties.column_separator' = '\\x01',"
+ " 'sink.properties.line_delimiter' = '\\x02',"
+ " 'sink.properties.group_commit' = 'sync_mode',"
+ " 'sink.ignore.update-before' = 'false',"
+ " 'sink.enable.batch-mode' = '%s',"
+ " 'sink.enable-delete' = 'true',"
+ " 'sink.flush.queue-size' = '2',"
+ " 'sink.buffer-flush.max-rows' = '10000',"
+ " 'sink.buffer-flush.max-bytes' = '10MB',"
+ " 'sink.buffer-flush.interval' = '1s'"
+ ")",
getFenodes(),
DATABASE + "." + TABLE_GROUP_COMMIT,
getDorisUsername(),
getDorisPassword(),
batchMode);
tEnv.executeSql(sinkDDL);
tEnv.executeSql(
"INSERT INTO doris_group_commit_sink SELECT 'doris',1 union all SELECT 'group_commit',2 union all SELECT 'flink',3");
Thread.sleep(25000);
List<String> expected = Arrays.asList("doris,1", "flink,3", "group_commit,2");
String query =
String.format(
"select name,age from %s.%s order by 1", DATABASE, TABLE_GROUP_COMMIT);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testTableGzFormat() throws Exception {
initializeTable(TABLE_GZ_FORMAT, DataModel.UNIQUE);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
String sinkDDL =
String.format(
"CREATE TABLE doris_gz_format_sink ("
+ " name STRING,"
+ " age INT"
+ ") WITH ("
+ " 'connector' = '"
+ DorisConfigOptions.IDENTIFIER
+ "',"
+ " 'fenodes' = '%s',"
+ " 'table.identifier' = '%s',"
+ " 'username' = '%s',"
+ " 'password' = '%s',"
+ " 'sink.enable.batch-mode' = '%s',"
+ " 'sink.enable-delete' = 'false',"
+ " 'sink.label-prefix' = '"
+ UUID.randomUUID()
+ "',"
+ " 'sink.properties.column_separator' = '\\x01',"
+ " 'sink.properties.line_delimiter' = '\\x02',"
+ " 'sink.properties.compress_type' = 'gz'"
+ ")",
getFenodes(),
DATABASE + "." + TABLE_GZ_FORMAT,
getDorisUsername(),
getDorisPassword(),
batchMode);
tEnv.executeSql(sinkDDL);
tEnv.executeSql(
"INSERT INTO doris_gz_format_sink SELECT 'doris',1 union all SELECT 'flink',2");
Thread.sleep(25000);
List<String> expected = Arrays.asList("doris,1", "flink,2");
String query =
String.format("select name,age from %s.%s order by 1", DATABASE, TABLE_GZ_FORMAT);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2);
}
@Test
public void testJobManagerFailoverSink() throws Exception {
LOG.info("start to test JobManagerFailoverSink.");
initializeFailoverTable(TABLE_CSV_JM, DataModel.DUPLICATE);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.enableCheckpointing(10000);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 0));
DorisSink.Builder<String> builder = DorisSink.builder();
final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder();
DorisOptions.Builder dorisBuilder = DorisOptions.builder();
dorisBuilder
.setFenodes(getFenodes())
.setTableIdentifier(DATABASE + "." + TABLE_CSV_JM)
.setUsername(getDorisUsername())
.setPassword(getDorisPassword());
DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
Properties properties = new Properties();
properties.setProperty("column_separator", ",");
properties.setProperty("line_delimiter", "\n");
properties.setProperty("format", "csv");
executionBuilder
.setLabelPrefix(UUID.randomUUID().toString())
.setStreamLoadProp(properties)
.setBatchMode(batchMode)
.setUseCache(true);
builder.setDorisReadOptions(readOptionBuilder.build())
.setDorisExecutionOptions(executionBuilder.build())
.setSerializer(new SimpleStringSerializer())
.setDorisOptions(dorisBuilder.build());
env.addSource(new MockSource(5)).sinkTo(builder.build());
JobClient jobClient = env.executeAsync();
waitForJobStatus(
jobClient,
Collections.singletonList(RUNNING),
Deadline.fromNow(Duration.ofSeconds(10)));
JobID jobID = jobClient.getJobID();
// wait checkpoint 2 times
Thread.sleep(20000);
LOG.info("trigger jobmanager failover...");
triggerFailover(
FailoverType.JM, jobID, miniClusterResource.getMiniCluster(), () -> sleepMs(100));
LOG.info("Waiting the JobManagerFailoverSink job to be finished. jobId={}", jobID);
waitForJobStatus(
jobClient,
Collections.singletonList(FINISHED),
Deadline.fromNow(Duration.ofSeconds(120)));
LOG.info("Will check job manager failover sink result.");
List<String> expected =
Arrays.asList("1,0", "1,1", "2,0", "2,1", "3,0", "3,1", "4,0", "4,1", "5,0", "5,1");
String query =
String.format("select id,task_id from %s.%s order by 1,2", DATABASE, TABLE_CSV_JM);
List<String> actualResult =
ContainerUtils.getResult(getDorisQueryConnection(), LOG, expected, query, 2);
Assert.assertTrue(
actualResult.size() >= expected.size() && actualResult.containsAll(expected));
}
@Test
public void testTaskManagerFailoverSink() throws Exception {
LOG.info("start to test TaskManagerFailoverSink.");
initializeFailoverTable(TABLE_CSV_TM, DataModel.DUPLICATE);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.enableCheckpointing(10000);
env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 0));
DorisSink.Builder<String> builder = DorisSink.builder();
final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder();
DorisOptions.Builder dorisBuilder = DorisOptions.builder();
dorisBuilder
.setFenodes(getFenodes())
.setTableIdentifier(DATABASE + "." + TABLE_CSV_TM)
.setUsername(getDorisUsername())
.setPassword(getDorisPassword());
DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
Properties properties = new Properties();
properties.setProperty("column_separator", ",");
properties.setProperty("line_delimiter", "\n");
properties.setProperty("format", "csv");
executionBuilder
.setLabelPrefix(UUID.randomUUID().toString())
.setBatchMode(batchMode)
.setStreamLoadProp(properties);
builder.setDorisReadOptions(readOptionBuilder.build())
.setDorisExecutionOptions(executionBuilder.build())
.setSerializer(new SimpleStringSerializer())
.setDorisOptions(dorisBuilder.build());
env.addSource(new MockSource(5)).sinkTo(builder.build());
JobClient jobClient = env.executeAsync();
waitForJobStatus(
jobClient,
Collections.singletonList(RUNNING),
Deadline.fromNow(Duration.ofSeconds(10)));
JobID jobID = jobClient.getJobID();
// wait checkpoint 2 times
Thread.sleep(20000);
LOG.info("trigger taskmanager failover...");
triggerFailover(
FailoverType.TM, jobID, miniClusterResource.getMiniCluster(), () -> sleepMs(100));
LOG.info("Waiting the TaskManagerFailoverSink job to be finished. jobId={}", jobID);
waitForJobStatus(
jobClient,
Collections.singletonList(FINISHED),
Deadline.fromNow(Duration.ofSeconds(120)));
LOG.info("Will check task manager failover sink result.");
List<String> expected =
Arrays.asList("1,0", "1,1", "2,0", "2,1", "3,0", "3,1", "4,0", "4,1", "5,0", "5,1");
String query =
String.format("select id,task_id from %s.%s order by 1,2", DATABASE, TABLE_CSV_TM);
List<String> actualResult =
ContainerUtils.getResult(getDorisQueryConnection(), LOG, expected, query, 2);
Assert.assertTrue(
actualResult.size() >= expected.size() && actualResult.containsAll(expected));
}
@Test
public void testTableOverwrite() throws Exception {
LOG.info("start to test testTableOverwrite.");
initializeTable(TABLE_OVERWRITE, DataModel.AGGREGATE);
// mock data
ContainerUtils.executeSQLStatement(
getDorisQueryConnection(),
LOG,
String.format(
"INSERT INTO %s.%s values('history-data',12)", DATABASE, TABLE_OVERWRITE));
List<String> expected_his = Arrays.asList("history-data,12");
String query =
String.format("select name,age from %s.%s order by 1", DATABASE, TABLE_OVERWRITE);
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected_his, query, 2);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(DEFAULT_PARALLELISM);
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
String sinkDDL =
String.format(
"CREATE TABLE doris_overwrite_sink ("
+ " name STRING,"
+ " age INT"
+ ") WITH ("
+ " 'connector' = '"
+ DorisConfigOptions.IDENTIFIER
+ "',"
+ " 'fenodes' = '%s',"
+ " 'table.identifier' = '%s',"
+ " 'jdbc-url' = '%s',"
+ " 'username' = '%s',"
+ " 'password' = '%s',"
+ " 'sink.enable.batch-mode' = '%s',"
+ " 'sink.label-prefix' = '"
+ UUID.randomUUID()
+ "'"
+ ")",
getFenodes(),
DATABASE + "." + TABLE_OVERWRITE,
getDorisQueryUrl(),
getDorisUsername(),
getDorisPassword(),
batchMode);
tEnv.executeSql(sinkDDL);
TableResult tableResult =
tEnv.executeSql(
"INSERT OVERWRITE doris_overwrite_sink SELECT 'doris',1 union all SELECT 'overwrite',2 union all SELECT 'flink',3");
JobClient jobClient = tableResult.getJobClient().get();
waitForJobStatus(
jobClient,
Collections.singletonList(FINISHED),
Deadline.fromNow(Duration.ofSeconds(120)));
List<String> expected = Arrays.asList("doris,1", "flink,3", "overwrite,2");
ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, query, 2, false);
}
private void initializeTable(String table, DataModel dataModel) {
String max = DataModel.AGGREGATE.equals(dataModel) ? "MAX" : "";
String morProps =
!DataModel.UNIQUE_MOR.equals(dataModel)
? ""
: ",\"enable_unique_key_merge_on_write\" = \"false\"";
String model =
dataModel.equals(DataModel.UNIQUE_MOR)
? DataModel.UNIQUE.toString()
: dataModel.toString();
ContainerUtils.executeSQLStatement(
getDorisQueryConnection(),
LOG,
String.format("CREATE DATABASE IF NOT EXISTS %s", DATABASE),
String.format("DROP TABLE IF EXISTS %s.%s", DATABASE, table),
String.format(
"CREATE TABLE %s.%s ( \n"
+ "`name` varchar(256),\n"
+ "`age` int %s\n"
+ ") "
+ " %s KEY(`name`) "
+ " DISTRIBUTED BY HASH(`name`) BUCKETS 1\n"
+ "PROPERTIES ("
+ "\"replication_num\" = \"1\"\n"
+ morProps
+ ")",
DATABASE,
table,
max,
model));
}
private void initializeFailoverTable(String table, DataModel dataModel) {
String max = DataModel.AGGREGATE.equals(dataModel) ? "MAX" : "";
String morProps =
!DataModel.UNIQUE_MOR.equals(dataModel)
? ""
: ",\"enable_unique_key_merge_on_write\" = \"false\"";
String model =
dataModel.equals(DataModel.UNIQUE_MOR)
? DataModel.UNIQUE.toString()
: dataModel.toString();
ContainerUtils.executeSQLStatement(
getDorisQueryConnection(),
LOG,
String.format("CREATE DATABASE IF NOT EXISTS %s", DATABASE),
String.format("DROP TABLE IF EXISTS %s.%s", DATABASE, table),
String.format(
"CREATE TABLE %s.%s ( \n"
+ "`id` int,\n"
+ "`task_id` int %s\n"
+ ") "
+ " %s KEY(`id`) "
+ "DISTRIBUTED BY HASH(`id`) BUCKETS 1\n"
+ "PROPERTIES (\n"
+ "\"replication_num\" = \"1\"\n"
+ morProps
+ ")\n",
DATABASE,
table,
max,
model));
}
}
// ---- next file: apache/systemds — src/main/java/org/apache/sysds/utils/Explain.java ----
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.sysds.hops.Hop;
import org.apache.sysds.hops.LiteralOp;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.hops.codegen.cplan.CNode;
import org.apache.sysds.hops.codegen.cplan.CNodeMultiAgg;
import org.apache.sysds.hops.codegen.cplan.CNodeTpl;
import org.apache.sysds.hops.ipa.FunctionCallGraph;
import org.apache.sysds.lops.Lop;
import org.apache.sysds.parser.DMLProgram;
import org.apache.sysds.parser.ForStatement;
import org.apache.sysds.parser.ForStatementBlock;
import org.apache.sysds.parser.FunctionStatement;
import org.apache.sysds.parser.FunctionStatementBlock;
import org.apache.sysds.parser.IfStatement;
import org.apache.sysds.parser.IfStatementBlock;
import org.apache.sysds.parser.ParForStatementBlock;
import org.apache.sysds.parser.StatementBlock;
import org.apache.sysds.parser.WhileStatement;
import org.apache.sysds.parser.WhileStatementBlock;
import org.apache.sysds.runtime.controlprogram.BasicProgramBlock;
import org.apache.sysds.runtime.controlprogram.ForProgramBlock;
import org.apache.sysds.runtime.controlprogram.FunctionProgramBlock;
import org.apache.sysds.runtime.controlprogram.IfProgramBlock;
import org.apache.sysds.runtime.controlprogram.ParForProgramBlock;
import org.apache.sysds.runtime.controlprogram.Program;
import org.apache.sysds.runtime.controlprogram.ProgramBlock;
import org.apache.sysds.runtime.controlprogram.WhileProgramBlock;
import org.apache.sysds.runtime.controlprogram.context.SparkExecutionContext;
import org.apache.sysds.runtime.instructions.Instruction;
import org.apache.sysds.runtime.instructions.cp.CPInstruction;
import org.apache.sysds.runtime.instructions.fed.FEDInstruction;
import org.apache.sysds.runtime.instructions.gpu.GPUInstruction;
import org.apache.sysds.runtime.instructions.ooc.OOCInstruction;
import org.apache.sysds.runtime.instructions.spark.CSVReblockSPInstruction;
import org.apache.sysds.runtime.instructions.spark.CheckpointSPInstruction;
import org.apache.sysds.runtime.instructions.spark.ReblockSPInstruction;
import org.apache.sysds.runtime.instructions.spark.SPInstruction;
import org.apache.sysds.runtime.lineage.LineageItem;
import org.apache.sysds.runtime.lineage.LineageItemUtils;
import org.apache.sysds.utils.stats.InfrastructureAnalyzer;
import org.apache.sysds.runtime.instructions.fed.FEDInstruction.FederatedOutput;
public class Explain
{
//internal configuration parameters
private static final boolean REPLACE_SPECIAL_CHARACTERS = true;
private static final boolean SHOW_MEM_ABOVE_BUDGET = true;
private static final boolean SHOW_LITERAL_HOPS = false;
private static final boolean SHOW_DATA_DEPENDENCIES = true;
private static final boolean SHOW_DATA_FLOW_PROPERTIES = true;
//different explain levels
/** Explain output level; controls whether HOP DAGs, runtime programs, and/or generated code are printed. */
public enum ExplainType {
    NONE,               // explain disabled
    HOPS,               // explain program and hops
    RUNTIME,            // explain runtime program (default)
    RECOMPILE_HOPS,     // explain hops, incl recompile
    RECOMPILE_RUNTIME,  // explain runtime program, incl recompile
    CODEGEN,            // show generated code, incl runtime explanation
    CODEGEN_RECOMPILE;  // show generated code, incl runtime explanation and recompilation

    /** True if this type requests hop-level explain in the given (re)compile context. */
    public boolean isHopsType(boolean recompile) {
        if( this == RECOMPILE_HOPS )
            return true;
        return !recompile && this == HOPS;
    }

    /** True if this type requests runtime-program explain in the given (re)compile context. */
    public boolean isRuntimeType(boolean recompile) {
        switch( this ) {
            case RECOMPILE_RUNTIME:
            case CODEGEN_RECOMPILE:
                return true;
            case RUNTIME:
            case CODEGEN:
                return !recompile;
            default:
                return false;
        }
    }

    /** True if this type additionally requests generated-code explain. */
    public boolean isCodegenType() {
        return this == CODEGEN || this == CODEGEN_RECOMPILE;
    }
}
/**
 * Mutable counters collected while scanning a runtime program: number of CP instructions,
 * distributed jobs, reblocks, and checkpoints. Used to decide what to print in the explain
 * header (memory budget, degree of parallelism).
 */
public static class ExplainCounts {
    public int numCPInst = 0;   // count of CP (control-program) instructions
    public int numJobs = 0;     // count of distributed (e.g., Spark) jobs
    public int numReblocks = 0; // count of reblock operations among the jobs
    public int numChkpts = 0;   // count of checkpoint operations among the jobs
}
//////////////
// public explain interface
/**
 * Renders the full explain output for a program, including the header with the explain type,
 * memory budget, and degree of parallelism, followed by the plan itself.
 *
 * @param prog   parsed DML program (used for hop-level explain)
 * @param rtprog compiled runtime program
 * @param type   requested explain level
 * @param counts precomputed instruction counts, or null to count on demand
 * @return the formatted explain string
 */
public static String display(DMLProgram prog, Program rtprog, ExplainType type, ExplainCounts counts) {
    ExplainCounts stats = (counts != null) ? counts : countDistributedOperations(rtprog);
    //assemble header and plan of program (hops or runtime)
    StringBuilder out = new StringBuilder();
    out.append("# EXPLAIN (").append(type.name()).append("):\n");
    out.append(Explain.explainMemoryBudget(stats)).append("\n");
    out.append(Explain.explainDegreeOfParallelism(stats));
    out.append(Explain.explain(prog, rtprog, type, stats));
    return out.toString();
}
/** Explains the memory budget assuming zero distributed operations. */
public static String explainMemoryBudget() {
    return explainMemoryBudget(new ExplainCounts());
}

/**
 * Formats the local and (if in Spark mode) remote memory budgets. Remote budgets are reported
 * as unknown ("?MB") when no Spark jobs will run or no Spark context exists yet, to avoid
 * forcing lazy Spark context creation.
 */
public static String explainMemoryBudget(ExplainCounts counts) {
    String local = OptimizerUtils.toMB(OptimizerUtils.getLocalMemBudget()) + "MB/";
    String remote;
    if( !OptimizerUtils.isSparkExecutionMode() ) {
        remote = "?MB/?MB";
    }
    else if( counts.numJobs - counts.numReblocks - counts.numChkpts <= 0
        || !SparkExecutionContext.isSparkContextCreated() ) {
        //avoid unnecessary lazy spark context creation on access to memory configurations
        remote = "?MB/?MB/?MB";
    }
    else { //default
        remote = OptimizerUtils.toMB(SparkExecutionContext.getDataMemoryBudget(true, false)) + "MB/"
            + OptimizerUtils.toMB(SparkExecutionContext.getDataMemoryBudget(false, false)) + "MB/"
            + OptimizerUtils.toMB(SparkExecutionContext.getBroadcastMemoryBudget()) + "MB";
    }
    return "# Memory Budget local/remote = " + local + remote;
}
/** Explains the degree of parallelism assuming zero distributed operations. */
public static String explainDegreeOfParallelism() {
    return explainDegreeOfParallelism(new ExplainCounts());
}

/**
 * Formats the local and (if in Spark mode) remote vcore counts. The remote count is "?" when
 * no Spark jobs will run or no Spark context exists yet, avoiding lazy context creation.
 */
public static String explainDegreeOfParallelism(ExplainCounts counts) {
    int localCores = InfrastructureAnalyzer.getLocalParallelism();
    String remote = "";
    if( OptimizerUtils.isSparkExecutionMode() ) {
        boolean unknown = counts.numJobs - counts.numReblocks - counts.numChkpts <= 0
            || !SparkExecutionContext.isSparkContextCreated();
        remote = unknown ? "?" //avoid unnecessary lazy spark context creation
            : String.valueOf(SparkExecutionContext.getDefaultParallelism(false));
    }
    return "# Degree of Parallelism (vcores) local/remote = " + localCores + "/" + remote;
}
/** Explains a program at the given level, counting distributed operations on demand. */
public static String explain(DMLProgram prog, Program rtprog, ExplainType type) {
    return explain(prog, rtprog, type, null);
}

/**
 * Dispatches to the hop-level or runtime-level explain utility according to {@code type};
 * returns null for {@link ExplainType#NONE}.
 */
public static String explain(DMLProgram prog, Program rtprog, ExplainType type, ExplainCounts counts) {
    //explain hops with stats
    if( type == ExplainType.HOPS || type == ExplainType.RECOMPILE_HOPS )
        return explain(prog);
    //explain runtime program (codegen types include runtime explanation)
    if( type == ExplainType.RUNTIME || type == ExplainType.RECOMPILE_RUNTIME
        || type == ExplainType.CODEGEN || type == ExplainType.CODEGEN_RECOMPILE )
        return explain(rtprog, counts);
    return null; //ExplainType.NONE
}
/**
 * Explains a parsed DML program at hop level: function call graph, each function body,
 * and finally the main program's statement blocks.
 */
public static String explain(DMLProgram prog)
{
    StringBuilder out = new StringBuilder();
    //create header
    out.append("\nPROGRAM\n");
    // Explain functions (if exists)
    if( prog.hasFunctionStatementBlocks() ) {
        out.append("--FUNCTIONS\n");
        //show function call graph
        out.append("----FUNCTION CALL GRAPH\n");
        out.append("------MAIN PROGRAM\n");
        FunctionCallGraph fgraph = new FunctionCallGraph(prog);
        out.append(explainFunctionCallGraph(fgraph, new HashSet<String>(), null, 3));
        //show individual functions
        for( String namespace : prog.getNamespaces().keySet() ) {
            for( String fname : prog.getFunctionStatementBlocks(namespace).keySet() ) {
                FunctionStatementBlock fsb = prog.getFunctionStatementBlock(namespace, fname);
                FunctionStatement fstmt = (FunctionStatement) fsb.getStatement(0);
                String fkey = DMLProgram.constructFunctionKey(namespace, fname);
                out.append("----FUNCTION ").append(fkey)
                    .append(" [recompile=").append(fsb.isRecompileOnce()).append("]\n");
                for( StatementBlock fblock : fstmt.getBody() )
                    out.append(explainStatementBlock(fblock, 3));
            }
        }
    }
    // Explain main program
    out.append("--MAIN PROGRAM\n");
    for( StatementBlock mblock : prog.getStatementBlocks() )
        out.append(explainStatementBlock(mblock, 2));
    return out.toString();
}
/** Explains a runtime program, counting compiled instructions on demand. */
public static String explain( Program rtprog ) {
    return explain(rtprog, null);
}
/**
 * Explains a compiled runtime program: header with CP/SP instruction counts, the function
 * call graph, each function program block (optimized and, if present, unoptimized), and the
 * main program blocks.
 *
 * @param rtprog runtime program to explain
 * @param counts precomputed instruction counts, or null to count here
 * @return the formatted explain string
 */
public static String explain( Program rtprog, ExplainCounts counts )
{
    //counts number of instructions
    boolean sparkExec = OptimizerUtils.isSparkExecutionMode();
    if( counts == null ) {
        counts = new ExplainCounts();
        countCompiledInstructions(rtprog, counts, true, sparkExec);
    }
    StringBuilder sb = new StringBuilder();
    //create header
    sb.append("\nPROGRAM ( size CP/"+(sparkExec?"SP":"MR")+" = ");
    sb.append(counts.numCPInst);
    sb.append("/");
    sb.append(counts.numJobs);
    sb.append(" )\n");
    //explain functions (if exists)
    Map<String, FunctionProgramBlock> funcMap = rtprog.getFunctionProgramBlocks();
    if( funcMap != null && !funcMap.isEmpty() )
    {
        sb.append("--FUNCTIONS\n");
        //show function call graph
        //NOTE(review): the call graph is only printed when the first program block still
        //carries its statement block (i.e., the DMLProgram is reachable from the runtime plan)
        if( !rtprog.getProgramBlocks().isEmpty() &&
            rtprog.getProgramBlocks().get(0).getStatementBlock() != null )
        {
            sb.append("----FUNCTION CALL GRAPH\n");
            sb.append("------MAIN PROGRAM\n");
            DMLProgram prog = rtprog.getProgramBlocks().get(0).getStatementBlock().getDMLProg();
            FunctionCallGraph fgraph = new FunctionCallGraph(prog);
            sb.append(explainFunctionCallGraph(fgraph, new HashSet<String>(), null, 3));
        }
        //show individual functions
        for( Entry<String, FunctionProgramBlock> e : funcMap.entrySet() ) {
            String fkey = e.getKey();
            FunctionProgramBlock fpb = e.getValue();
            //explain optimized function
            sb.append("----FUNCTION "+fkey+" [recompile="+fpb.isRecompileOnce()+"]\n");
            for( ProgramBlock pb : fpb.getChildBlocks() )
                sb.append( explainProgramBlock(pb,3) );
            //explain unoptimized function (kept alongside the optimized variant)
            if( rtprog.containsFunctionProgramBlock(fkey, false) ) {
                FunctionProgramBlock fpb2 = rtprog.getFunctionProgramBlock(fkey, false);
                sb.append("----FUNCTION "+fkey+" (unoptimized) [recompile="+fpb2.isRecompileOnce()+"]\n");
                for( ProgramBlock pb : fpb2.getChildBlocks() )
                    sb.append( explainProgramBlock(pb,3) );
            }
        }
    }
    //explain main program
    sb.append("--MAIN PROGRAM\n");
    for( ProgramBlock pb : rtprog.getProgramBlocks() )
        sb.append( explainProgramBlock(pb,2) );
    return sb.toString();
}
/** Explains a single program block without indentation. */
public static String explain( ProgramBlock pb ) {
	final int rootLevel = 0;
	return explainProgramBlock(pb, rootLevel);
}
/** Explains a list of instructions without indentation. */
public static String explain( List<Instruction> inst ) {
	final int rootLevel = 0;
	return explainInstructions(inst, rootLevel);
}
/** Explains a list of instructions at the given indentation level. */
public static String explain( List<Instruction> inst, int level ) {
	String text = explainInstructions(inst, level);
	return text;
}
/** Explains a single instruction without indentation. */
public static String explain( Instruction inst ) {
	return explainGenericInstruction(inst, 0 /*root level*/);
}
/** Explains a single statement block (HOP level) without indentation. */
public static String explain( StatementBlock sb ) {
	final int rootLevel = 0;
	return explainStatementBlock(sb, rootLevel);
}
/** Explains a list of HOP DAG roots without indentation. */
public static String explainHops( List<Hop> hops ) {
	final int rootLevel = 0;
	return explainHops(hops, rootLevel);
}
/**
 * Explains a list of HOP DAG roots at the given indentation level.
 * Visit flags are reset before and after traversal so each DAG is
 * printed exactly once and flags are left clean for other passes.
 */
public static String explainHops( List<Hop> hops, int level ) {
	StringBuilder buf = new StringBuilder();
	Hop.resetVisitStatus(hops);
	for( int i = 0; i < hops.size(); i++ )
		buf.append(explainHop(hops.get(i), level));
	Hop.resetVisitStatus(hops);
	return buf.toString();
}
/** Explains a single HOP DAG (rooted at hop) without indentation. */
public static String explain( Hop hop ) {
	final int rootLevel = 0;
	return explain(hop, rootLevel);
}
/**
 * Explains a single HOP DAG at the given indentation level,
 * resetting visit flags before and after traversal.
 */
public static String explain( Hop hop, int level ) {
	hop.resetVisitStatus();
	String text = explainHop(hop, level);
	hop.resetVisitStatus();
	return text;
}
/** Explains an array of lineage DAG roots without indentation. */
public static String explainLineageItems( LineageItem[] lis ) {
	final int rootLevel = 0;
	return explainLineageItems(lis, rootLevel);
}
/**
 * Explains an array of lineage DAG roots at the given indentation
 * level, using the non-recursive traversal and resetting visit
 * flags before and after.
 */
public static String explainLineageItems( LineageItem[] lis, int level ) {
	StringBuilder out = new StringBuilder();
	LineageItem.resetVisitStatusNR(lis);
	for( int i = 0; i < lis.length; i++ )
		out.append(explainLineageItemNR(lis[i], level));
	LineageItem.resetVisitStatusNR(lis);
	return out.toString();
}
/**
 * Explains a single lineage DAG (rooted at li) without indentation.
 *
 * @param li lineage item DAG root
 * @return string representation of the lineage DAG
 */
public static String explain( LineageItem li ) {
li.resetVisitStatusNR();
String s = explain(li, 0);
// optional dedup-block expansion, currently disabled:
//s += rExplainDedupItems(li, new ArrayList<>());
li.resetVisitStatusNR();
return s;
}
/**
 * Explains a single lineage DAG at the given indentation level,
 * resetting visit flags before and after traversal.
 */
private static String explain( LineageItem li, int level ) {
	li.resetVisitStatusNR();
	String text = explainLineageItemNR(li, level);
	li.resetVisitStatusNR();
	return text;
}
@Deprecated
@SuppressWarnings("unused")
// Recursively appends an expansion for every distinct dedup lineage
// block reachable from li (one section per dedup path, deduplicated
// via the 'paths' list). Relies on the visited flag to terminate on
// shared subgraphs; callers must reset visit status around the call.
// Currently unused (see disabled call site in explain(LineageItem)).
private static String rExplainDedupItems(LineageItem li, List<String> paths) {
if (li.isVisited())
return "";
StringBuilder sb = new StringBuilder();
if (li.getType() == LineageItem.LineageItemType.Dedup && !paths.contains(li.getData())) {
sb.append("\n").append("dedup").append(li.getData()).append(":\n");
sb.append(Explain.explain(li, 0));
paths.add(li.getData());
}
if (li.getInputs() != null)
for (LineageItem in : li.getInputs())
sb.append(rExplainDedupItems(in, paths));
li.setVisited();
return sb.toString();
}
/**
 * Explains a codegen plan template: a header with template info and
 * input names, followed by the body DAG(s). Multi-aggregate templates
 * print one DAG per output; all other templates print a single DAG.
 *
 * @param cplan codegen plan template
 * @return string representation of the codegen plan
 */
public static String explainCPlan( CNodeTpl cplan ) {
StringBuilder sb = new StringBuilder();
//create template header
sb.append("\n----------------------------------------\n");
sb.append("CPLAN: "+cplan.getTemplateInfo()+"\n");
sb.append("--inputs: "+Arrays.toString(cplan.getInputNames())+"\n");
sb.append("----------------------------------------\n");
//explain body dag (visit flags reset around traversal)
cplan.resetVisitStatusOutputs();
if( cplan instanceof CNodeMultiAgg )
for( CNode output : ((CNodeMultiAgg)cplan).getOutputs() )
sb.append(explainCNode(output, 1));
else
sb.append(explainCNode(cplan.getOutput(), 1));
cplan.resetVisitStatusOutputs();
sb.append("----------------------------------------\n");
return sb.toString();
}
/** Explains a single codegen node DAG without indentation. */
public static String explain( CNode node ) {
	final int rootLevel = 0;
	return explain(node, rootLevel);
}
/** Explains a single codegen node DAG at the given indentation level. */
public static String explain( CNode node, int level ) {
	String text = explainCNode(node, level);
	return text;
}
/**
 * Counts the number of compiled MRJob/Spark instructions in the
 * given runtime program (CP and distributed instructions are both
 * counted, independent of the current execution mode).
 *
 * @param rtprog runtime program
 * @return counts of compiled instructions
 */
public static ExplainCounts countDistributedOperations( Program rtprog ) {
	ExplainCounts result = new ExplainCounts();
	countCompiledInstructions(rtprog, result, true, true);
	return result;
}
/**
 * Returns the indentation prefix ("--" per level) for a given level.
 * NOTE: the method name keeps its historical misspelling
 * ("Identation" instead of "Indentation") because it is public API
 * and renaming it would break existing callers.
 */
public static String getIdentation( int level ) {
return createOffset(level);
}
//////////////
// internal explain HOPS
/**
 * Recursively explains a statement block at the HOP level: control
 * flow blocks (while/if/for/parfor) print a header with line ranges
 * and optional in-place/recompile flags, followed by their predicate
 * HOPs and bodies; generic blocks print their HOP DAGs.
 *
 * @param sb statement block
 * @param level indentation level ("--" per level)
 * @return string representation of the statement block
 */
private static String explainStatementBlock(StatementBlock sb, int level)
{
StringBuilder builder = new StringBuilder();
String offset = createOffset(level);
if (sb instanceof WhileStatementBlock) {
WhileStatementBlock wsb = (WhileStatementBlock) sb;
builder.append(offset);
//annotate update-in-place variables and recompile-once flag, if any
if( !wsb.getUpdateInPlaceVars().isEmpty() || wsb.isRecompileOnce() ) {
builder.append("WHILE (lines "+wsb.getBeginLine()+"-"+wsb.getEndLine()+") ");
builder.append("[in-place="+wsb.getUpdateInPlaceVars().toString()+", recompile="+wsb.isRecompileOnce()+"]\n");
}
else
builder.append("WHILE (lines "+wsb.getBeginLine()+"-"+wsb.getEndLine()+")\n");
builder.append(explainHop(wsb.getPredicateHops(), level+1));
WhileStatement ws = (WhileStatement)sb.getStatement(0);
for (StatementBlock current : ws.getBody())
builder.append(explainStatementBlock(current, level+1));
}
else if (sb instanceof IfStatementBlock) {
IfStatementBlock ifsb = (IfStatementBlock) sb;
builder.append(offset);
builder.append("IF (lines "+ifsb.getBeginLine()+"-"+ifsb.getEndLine()+")\n");
builder.append(explainHop(ifsb.getPredicateHops(), level+1));
IfStatement ifs = (IfStatement) sb.getStatement(0);
for (StatementBlock current : ifs.getIfBody())
builder.append(explainStatementBlock(current, level+1));
//ELSE header only printed if an else body exists
if( !ifs.getElseBody().isEmpty() ) {
builder.append(offset);
builder.append("ELSE\n");
}
for (StatementBlock current : ifs.getElseBody())
builder.append(explainStatementBlock(current, level+1));
}
else if (sb instanceof ForStatementBlock) {
ForStatementBlock fsb = (ForStatementBlock) sb;
builder.append(offset);
if (sb instanceof ParForStatementBlock) {
if( !fsb.getUpdateInPlaceVars().isEmpty() )
builder.append("PARFOR (lines "+fsb.getBeginLine()+"-"+fsb.getEndLine()+") [in-place="+fsb.getUpdateInPlaceVars().toString()+"]\n");
else
builder.append("PARFOR (lines "+fsb.getBeginLine()+"-"+fsb.getEndLine()+")\n");
}
else {
if( !fsb.getUpdateInPlaceVars().isEmpty() || fsb.isRecompileOnce() ) {
builder.append("FOR (lines "+fsb.getBeginLine()+"-"+fsb.getEndLine()+") ");
builder.append("[in-place="+fsb.getUpdateInPlaceVars().toString()+", recompile="+fsb.isRecompileOnce()+"]\n");
}
else
builder.append("FOR (lines "+fsb.getBeginLine()+"-"+fsb.getEndLine()+")\n");
}
//from/to/increment predicates may be absent (e.g., constant bounds)
if (fsb.getFromHops() != null)
builder.append(explainHop(fsb.getFromHops(), level+1));
if (fsb.getToHops() != null)
builder.append(explainHop(fsb.getToHops(), level+1));
if (fsb.getIncrementHops() != null)
builder.append(explainHop(fsb.getIncrementHops(), level+1));
ForStatement fs = (ForStatement)sb.getStatement(0);
for (StatementBlock current : fs.getBody())
builder.append(explainStatementBlock(current, level+1));
}
else if (sb instanceof FunctionStatementBlock) {
//function blocks print no header of their own, only the body
FunctionStatement fsb = (FunctionStatement) sb.getStatement(0);
for (StatementBlock current : fsb.getBody())
builder.append(explainStatementBlock(current, level+1));
}
else {
// For generic StatementBlock
builder.append(offset);
builder.append("GENERIC (lines "+sb.getBeginLine()+"-"+sb.getEndLine()+") [recompile=" + sb.requiresRecompilation() + "]\n");
ArrayList<Hop> hopsDAG = sb.getHops();
if( hopsDAG != null && !hopsDAG.isEmpty() ) {
//visit flags reset around traversal so shared HOPs print once
Hop.resetVisitStatus(hopsDAG);
for (Hop hop : hopsDAG)
builder.append(explainHop(hop, level+1));
Hop.resetVisitStatus(hopsDAG);
}
}
return builder.toString();
}
/**
 * Do a post-order traverse through the Hop DAG and explain each Hop.
 * Already-visited HOPs and (unless SHOW_LITERAL_HOPS) literals are
 * skipped, so shared subexpressions are printed exactly once.
 * Callers are responsible for resetting visit flags around the call.
 *
 * @param hop high-level operator
 * @param level offset
 * @return string explanation of Hop DAG
 */
private static String explainHop(Hop hop, int level) {
if( hop.isVisited() || (!SHOW_LITERAL_HOPS && hop instanceof LiteralOp) )
return "";
StringBuilder sb = new StringBuilder();
String offset = createOffset(level);
//post-order: inputs first, then this hop
for( Hop input : hop.getInput() )
sb.append(explainHop(input, level));
//indentation
sb.append(offset);
//hop id
if( SHOW_DATA_DEPENDENCIES )
sb.append("("+hop.getHopID()+") ");
//operation string
sb.append(hop.getOpString());
//input hop references (only printed if at least one non-literal input)
if( SHOW_DATA_DEPENDENCIES ) {
StringBuilder childs = new StringBuilder();
childs.append(" (");
boolean childAdded = false;
for( Hop input : hop.getInput() )
if( SHOW_LITERAL_HOPS || !(input instanceof LiteralOp) ){
childs.append(childAdded?",":"");
childs.append(input.getHopID());
childAdded = true;
}
childs.append(")");
if( childAdded )
sb.append(childs.toString());
}
//matrix characteristics: [rows, cols, blocksize, nnz(, update-type)]
sb.append(" [" + hop.getDim1() + ","
+ hop.getDim2() + ","
+ hop.getBlocksize() + ","
+ hop.getNnz());
if (hop.getUpdateType().isInPlace())
sb.append("," + hop.getUpdateType().toString().toLowerCase());
sb.append("]");
//memory estimates: [input, intermediate, output -> total]
sb.append(" [" + showMem(hop.getInputMemEstimate(), false) + ","
+ showMem(hop.getIntermediateMemEstimate(), false) + ","
+ showMem(hop.getOutputMemEstimate(), false) + " -> "
+ showMem(hop.getMemEstimate(), true) + "]");
//data flow properties (reblock/checkpoint requirements)
if( SHOW_DATA_FLOW_PROPERTIES ) {
if( hop.requiresReblock() && hop.requiresCheckpoint() )
sb.append(" [rblk,chkpt]");
else if( hop.requiresReblock() )
sb.append(" [rblk]");
else if( hop.requiresCheckpoint() )
sb.append(" [chkpt]");
}
//exec type
if (hop.getExecType() != null)
sb.append(", " + hop.getExecType());
if ( hop.getFederatedOutput() != FederatedOutput.NONE )
sb.append(" ").append(hop.getFederatedOutput()).append(" ");
sb.append('\n');
hop.setVisited();
return sb.toString();
}
/**
 * Non-recursive (stack-based) DFS explanation of a lineage DAG that
 * appends each item after its inputs (post-order), matching the
 * output of the old recursive implementation. Callers are
 * responsible for resetting visit flags around the call.
 *
 * @param item lineage DAG root
 * @param level indentation level (same for all items)
 * @return string representation of the lineage DAG
 */
private static String explainLineageItemNR(LineageItem item, int level) {
//NOTE: in contrast to similar non-recursive functions like resetVisitStatusNR,
// we maintain a more complex stack to ensure DFS ordering where current nodes
// are added after the subtree underneath is processed (backwards compatibility)
//stackPos tracks, per stack entry, the index of the next input to descend into
Stack<LineageItem> stackItem = new Stack<>();
Stack<MutableInt> stackPos = new Stack<>();
stackItem.push(item); stackPos.push(new MutableInt(0));
StringBuilder sb = new StringBuilder();
while( !stackItem.empty() ) {
LineageItem tmpItem = stackItem.peek();
MutableInt tmpPos = stackPos.peek();
//check ascent condition - no item processing
if( tmpItem.isVisited() ) {
stackItem.pop(); stackPos.pop();
}
//check ascent condition - append item
else if( tmpItem.getInputs() == null
|| tmpItem.getOpcode().startsWith(LineageItemUtils.LPLACEHOLDER)
// don't trace beyond if a placeholder is found
|| tmpItem.getInputs().length <= tmpPos.intValue() ) {
sb.append(createOffset(level));
sb.append(tmpItem.toString());
sb.append('\n');
stackItem.pop(); stackPos.pop();
tmpItem.setVisited();
}
//check descent condition
else if( tmpItem.getInputs() != null ) {
stackItem.push(tmpItem.getInputs()[tmpPos.intValue()]);
tmpPos.increment();
stackPos.push(new MutableInt(0));
}
}
return sb.toString();
}
@Deprecated
@SuppressWarnings("unused")
// Recursive predecessor of explainLineageItemNR: post-order append of
// each lineage item after its inputs, guarded by the visited flag.
// Kept for reference; may overflow the stack on deep lineage DAGs,
// which is why the non-recursive variant above is used instead.
private static String explainLineageItem(LineageItem li, int level) {
if( li.isVisited())
return "";
StringBuilder sb = new StringBuilder();
String offset = createOffset(level);
if (li.getInputs() != null)
for( LineageItem input : li.getInputs() )
sb.append(explainLineageItem(input, level));
sb.append(offset);
sb.append(li.toString());
sb.append('\n');
li.setVisited();
return sb.toString();
}
//////////////
// internal explain CNODE
/**
 * Post-order explanation of a codegen node DAG: inputs first, then
 * the node itself with optional ID and input-ID annotations.
 * Already-visited nodes are skipped; callers reset visit flags.
 *
 * @param cnode codegen node
 * @param level indentation level
 * @return string representation of the codegen node DAG
 */
private static String explainCNode(CNode cnode, int level) {
if( cnode.isVisited() )
return "";
StringBuilder sb = new StringBuilder();
String offset = createOffset(level);
for( CNode input : cnode.getInput() )
sb.append(explainCNode(input, level));
//indentation
sb.append(offset);
//hop id
if( SHOW_DATA_DEPENDENCIES )
sb.append("("+cnode.getID()+") ");
//operation string
sb.append(cnode.toString());
//input hop references (only printed if at least one input exists)
if( SHOW_DATA_DEPENDENCIES ) {
StringBuilder childs = new StringBuilder();
childs.append(" (");
boolean childAdded = false;
for( CNode input : cnode.getInput() ) {
childs.append(childAdded?",":"");
childs.append(input.getID());
childAdded = true;
}
childs.append(")");
if( childAdded )
sb.append(childs.toString());
}
sb.append('\n');
cnode.setVisited();
return sb.toString();
}
//////////////
// internal explain RUNTIME
/** Explains a list of program blocks, each without indentation. */
public static String explainProgramBlocks( List<ProgramBlock> pbs ) {
	StringBuilder out = new StringBuilder();
	for( int i = 0; i < pbs.size(); i++ )
		out.append(explain(pbs.get(i)));
	return out.toString();
}
/**
 * Recursively explains a runtime program block: control flow blocks
 * (while/if/for/parfor) print a header with line ranges and optional
 * in-place/recompile flags, followed by predicate/bound instructions
 * and child blocks; basic blocks print their instruction list.
 *
 * @param pb program block
 * @param level indentation level ("--" per level)
 * @return string representation of the program block
 */
private static String explainProgramBlock( ProgramBlock pb, int level )
{
StringBuilder sb = new StringBuilder();
String offset = createOffset(level);
if (pb instanceof FunctionProgramBlock ) {
//function blocks print no header of their own, only the body
FunctionProgramBlock fpb = (FunctionProgramBlock)pb;
for( ProgramBlock pbc : fpb.getChildBlocks() )
sb.append( explainProgramBlock( pbc, level+1) );
}
else if (pb instanceof WhileProgramBlock) {
WhileProgramBlock wpb = (WhileProgramBlock) pb;
StatementBlock wsb = pb.getStatementBlock();
sb.append(offset);
//in-place/recompile annotations come from the compile-time statement block
if( wsb != null && (!wsb.getUpdateInPlaceVars().isEmpty() || wsb.isRecompileOnce()) ) {
sb.append("WHILE (lines "+wpb.getBeginLine()+"-"+wpb.getEndLine()+") ");
sb.append("[in-place="+wsb.getUpdateInPlaceVars().toString()+", recompile="+wsb.isRecompileOnce()+"]\n");
}
else
sb.append("WHILE (lines "+wpb.getBeginLine()+"-"+wpb.getEndLine()+")\n");
sb.append(explainInstructions(wpb.getPredicate(), level+1));
for( ProgramBlock pbc : wpb.getChildBlocks() )
sb.append( explainProgramBlock( pbc, level+1) );
if( wpb.getExitInstruction() != null )
sb.append(explainInstructions(wpb.getExitInstruction(), level+1));
}
else if (pb instanceof IfProgramBlock) {
IfProgramBlock ipb = (IfProgramBlock) pb;
sb.append(offset);
sb.append("IF (lines "+ipb.getBeginLine()+"-"+ipb.getEndLine()+")\n");
sb.append(explainInstructions(ipb.getPredicate(), level+1));
for( ProgramBlock pbc : ipb.getChildBlocksIfBody() )
sb.append( explainProgramBlock( pbc, level+1) );
//ELSE header only printed if an else body exists
if( !ipb.getChildBlocksElseBody().isEmpty() ) {
sb.append(offset);
sb.append("ELSE\n");
for( ProgramBlock pbc : ipb.getChildBlocksElseBody() )
sb.append( explainProgramBlock( pbc, level+1) );
}
if( ipb.getExitInstruction() != null )
sb.append(explainInstructions(ipb.getExitInstruction(), level+1));
}
else if (pb instanceof ForProgramBlock) { //incl parfor
ForProgramBlock fpb = (ForProgramBlock) pb;
StatementBlock fsb = pb.getStatementBlock();
sb.append(offset);
if( pb instanceof ParForProgramBlock )
sb.append("PARFOR (lines "+fpb.getBeginLine()+"-"+fpb.getEndLine()+")\n");
else {
if( fsb != null && (!fsb.getUpdateInPlaceVars().isEmpty() || fsb.isRecompileOnce()) ) {
sb.append("FOR (lines "+fpb.getBeginLine()+"-"+fpb.getEndLine()+") ");
sb.append("[in-place="+fsb.getUpdateInPlaceVars().toString()+", recompile="+fsb.isRecompileOnce()+"]\n");
}
else
sb.append("FOR (lines "+fpb.getBeginLine()+"-"+fpb.getEndLine()+")\n");
}
sb.append(explainInstructions(fpb.getFromInstructions(), level+1));
sb.append(explainInstructions(fpb.getToInstructions(), level+1));
sb.append(explainInstructions(fpb.getIncrementInstructions(), level+1));
for( ProgramBlock pbc : fpb.getChildBlocks() )
sb.append( explainProgramBlock( pbc, level+1) );
if( fpb.getExitInstruction() != null )
sb.append(explainInstructions(fpb.getExitInstruction(), level+1));
}
else if( pb instanceof BasicProgramBlock ) {
BasicProgramBlock bpb = (BasicProgramBlock) pb;
sb.append(offset);
//recompile flag only available if a statement block is attached
if( pb.getStatementBlock()!=null )
sb.append("GENERIC (lines "+pb.getBeginLine()+"-"+pb.getEndLine()+") [recompile="+pb.getStatementBlock().requiresRecompilation()+"]\n");
else
sb.append("GENERIC (lines "+pb.getBeginLine()+"-"+pb.getEndLine()+") \n");
sb.append(explainInstructions(bpb.getInstructions(), level+1));
}
return sb.toString();
}
/**
 * Explains a list of instructions, one per line, each prefixed by
 * the indentation of the given level.
 */
private static String explainInstructions( List<Instruction> instSet, int level ) {
	String prefix = createOffset(level);
	StringBuilder out = new StringBuilder();
	for( Instruction inst : instSet ) {
		out.append(prefix)
			.append(explainGenericInstruction(inst, level))
			.append('\n');
	}
	return out.toString();
}
/**
 * Explains a single instruction on one line, prefixed by the
 * indentation of the given level.
 */
private static String explainInstructions( Instruction inst, int level ) {
	return createOffset(level) + explainGenericInstruction(inst, level) + '\n';
}
/**
 * Explains a single instruction of any backend (SP/CP/GPU/FED/OOC)
 * via its toString, optionally replacing internal delimiter
 * characters with readable separators.
 * NOTE: returns null for unrecognized instruction types (callers
 * would then append the literal string "null").
 * NOTE: the delimiters are passed to String.replaceAll and are thus
 * interpreted as regular expressions — safe only as long as the
 * Lop delimiter constants contain no regex metacharacters.
 *
 * @param inst instruction
 * @param level indentation level (currently unused here)
 * @return readable instruction string, or null if unsupported
 */
private static String explainGenericInstruction( Instruction inst, int level )
{
String tmp = null;
if ( inst instanceof SPInstruction || inst instanceof CPInstruction
|| inst instanceof GPUInstruction || inst instanceof FEDInstruction
|| inst instanceof OOCInstruction)
tmp = inst.toString();
if( REPLACE_SPECIAL_CHARACTERS && tmp != null){
tmp = tmp.replaceAll(Lop.OPERAND_DELIMITOR, " ");
tmp = tmp.replaceAll(Lop.DATATYPE_PREFIX, ".");
tmp = tmp.replaceAll(Lop.INSTRUCTION_DELIMITOR, ", ");
}
return tmp;
}
@SuppressWarnings("unused")
/**
 * Formats a memory estimate in MB, returning "MAX" for estimates at
 * or above the default size unless SHOW_MEM_ABOVE_BUDGET is set.
 */
private static String showMem(double mem, boolean units)
{
	boolean capped = !SHOW_MEM_ABOVE_BUDGET
		&& mem >= OptimizerUtils.DEFAULT_SIZE;
	if( capped )
		return "MAX";
	String suffix = units ? "MB" : "";
	return OptimizerUtils.toMB(mem) + suffix;
}
/**
 * Creates the indentation prefix for a given nesting level: two
 * dashes per level (e.g., level 3 yields "------"); non-positive
 * levels yield the empty string.
 */
public static String createOffset( int level )
{
	StringBuilder pad = new StringBuilder(Math.max(level, 0) * 2);
	for( int i = level; i > 0; i-- )
		pad.append("--");
	return pad.toString();
}
/**
 * Counts compiled instructions over an entire runtime program:
 * all DML-bodied functions first, then the main program blocks.
 */
private static void countCompiledInstructions( Program rtprog, ExplainCounts counts, boolean CP, boolean SP )
{
	//analyze DML-bodied functions
	rtprog.getFunctionProgramBlocks().values()
		.forEach(fpb -> countCompiledInstructions(fpb, counts, CP, SP));
	//analyze main program
	rtprog.getProgramBlocks()
		.forEach(pb -> countCompiledInstructions(pb, counts, CP, SP));
}
/**
 * Recursively counts the number of compiled MRJob instructions in the
 * given runtime program block, descending into all control-flow and
 * function child blocks (parfor runtime jobs are counted elsewhere).
 *
 * @param pb program block
 * @param counts explain counts
 * @param CP if true, count CP instructions
 * @param SP if true, count Spark instructions
 */
private static void countCompiledInstructions(ProgramBlock pb, ExplainCounts counts, boolean CP, boolean SP)
{
if (pb instanceof WhileProgramBlock) {
WhileProgramBlock tmp = (WhileProgramBlock)pb;
countCompiledInstructions(tmp.getPredicate(), counts, CP, SP);
for (ProgramBlock pb2 : tmp.getChildBlocks())
countCompiledInstructions(pb2, counts, CP, SP);
}
else if (pb instanceof IfProgramBlock) {
IfProgramBlock tmp = (IfProgramBlock)pb;
countCompiledInstructions(tmp.getPredicate(), counts, CP, SP);
for( ProgramBlock pb2 : tmp.getChildBlocksIfBody() )
countCompiledInstructions(pb2, counts, CP, SP);
for( ProgramBlock pb2 : tmp.getChildBlocksElseBody() )
countCompiledInstructions(pb2, counts, CP, SP);
}
else if (pb instanceof ForProgramBlock) { //includes ParFORProgramBlock
ForProgramBlock tmp = (ForProgramBlock)pb;
countCompiledInstructions(tmp.getFromInstructions(), counts, CP, SP);
countCompiledInstructions(tmp.getToInstructions(), counts, CP, SP);
countCompiledInstructions(tmp.getIncrementInstructions(), counts, CP, SP);
for( ProgramBlock pb2 : tmp.getChildBlocks() )
countCompiledInstructions(pb2, counts, CP, SP);
//additional parfor jobs counted during runtime
}
else if ( pb instanceof FunctionProgramBlock ) {
FunctionProgramBlock fpb = (FunctionProgramBlock)pb;
for( ProgramBlock pb2 : fpb.getChildBlocks() )
countCompiledInstructions(pb2, counts, CP, SP);
}
else if( pb instanceof BasicProgramBlock ) {
BasicProgramBlock bpb = (BasicProgramBlock) pb;
countCompiledInstructions(bpb.getInstructions(), counts, CP, SP);
}
}
/**
 * Count the number of Hadoop instructions, CP instructions, Spark
 * instructions, and/or Spark reblock instructions in a list of
 * instructions.
 *
 * @param instSet
 * list of instructions
 * @param counts
 * explain counts
 * @param CP
 * if true, count CP instructions
 * @param SP
 * if true, count Spark instructions and Spark reblock
 * instructions
 */
private static void countCompiledInstructions( List<Instruction> instSet, ExplainCounts counts, boolean CP, boolean SP )
{
for( Instruction inst : instSet )
{
if( CP && inst instanceof CPInstruction )
counts.numCPInst++;
else if( SP && inst instanceof SPInstruction )
counts.numJobs++;
//keep track of reblocks (in order to prevent unnecessary spark context creation)
if( SP && (inst instanceof CSVReblockSPInstruction || inst instanceof ReblockSPInstruction) )
counts.numReblocks++;
//checkpoints tracked separately for lazy spark context creation as well
if( SP && inst instanceof CheckpointSPInstruction )
counts.numChkpts++;
}
}
/**
 * Recursively renders the function call graph starting at the given
 * function key (null for the main program). Recursive calls are cut
 * off and annotated with "(recursive)" instead of being expanded;
 * fstack tracks the current call path for that cycle detection.
 *
 * @param fgraph function call graph
 * @param fstack functions on the current expansion path (mutated and restored)
 * @param fkey function key to expand, or null for the main program
 * @param level indentation level
 * @return string representation of the call graph subtree
 */
public static String explainFunctionCallGraph(FunctionCallGraph fgraph, Set<String> fstack, String fkey, int level)
{
StringBuilder builder = new StringBuilder();
String offset = createOffset(level);
Collection<String> cfkeys = fgraph.getCalledFunctions(fkey);
if( cfkeys != null ) {
for( String cfkey : cfkeys ) {
if( fstack.contains(cfkey) && fgraph.isRecursiveFunction(cfkey) )
builder.append(offset + "--" + cfkey + " (recursive)\n");
else {
fstack.add(cfkey);
builder.append(offset + "--" + cfkey + "\n");
builder.append(explainFunctionCallGraph(fgraph, fstack, cfkey, level+1));
fstack.remove(cfkey);
}
}
}
return builder.toString();
}
}
|
googleapis/google-api-java-client | 34,939 | google-api-client/src/test/java/com/google/api/client/googleapis/batch/BatchRequestTest.java | // Copyright 2012 Google Inc. All Rights Reserved.
package com.google.api.client.googleapis.batch;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.api.client.googleapis.batch.BatchRequest.RequestInfo;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo;
import com.google.api.client.googleapis.json.GoogleJsonErrorContainer;
import com.google.api.client.googleapis.testing.services.MockGoogleClient;
import com.google.api.client.googleapis.testing.services.MockGoogleClientRequest;
import com.google.api.client.http.ByteArrayContent;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpContent;
import com.google.api.client.http.HttpExecuteInterceptor;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.http.HttpMethods;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
import com.google.api.client.http.LowLevelHttpRequest;
import com.google.api.client.http.LowLevelHttpResponse;
import com.google.api.client.json.GenericJson;
import com.google.api.client.json.JsonObjectParser;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.client.protobuf.ProtoObjectParser;
import com.google.api.client.testing.http.HttpTesting;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpRequest;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import com.google.api.client.util.Charsets;
import com.google.api.client.util.Key;
import com.google.api.client.util.ObjectParser;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
/**
* Tests {@link BatchRequest}.
*
* @author rmistry@google.com (Ravi Mistry)
*/
public class BatchRequestTest extends TestCase {
//service endpoints used by the mock client
private static final String ROOT_URL = "http://www.test.com/";
private static final String SERVICE_PATH = "test/";
private static final String TEST_BATCH_URL = "http://www.testgoogleapis.com/batch";
private static final String URI_TEMPLATE1 = "uri/template/1";
private static final String URI_TEMPLATE2 = "uri/template/2";
private static final String METHOD1 = HttpMethods.GET;
private static final String METHOD2 = HttpMethods.POST;
//expected error payload fields for failure callbacks
private static final String ERROR_MSG = "Error message";
private static final String ERROR_REASON = "notFound";
private static final int ERROR_CODE = 503;
private static final String ERROR_DOMAIN = "global";
//multipart boundary used in mocked batch responses
private static final String RESPONSE_BOUNDARY = "ABC=DE=F";
//expected payload fields for success callbacks
private static final String TEST_ID = "Humpty Dumpty";
private static final String TEST_KIND = "Big\nEgg\n"; // Newlines help test boundary detection
private static final String TEST_NAME = "James Bond";
private static final String TEST_NUM = "007";
//per-test fixtures (callbacks created in setUp; transport/credential
//are created by each test with the configuration it needs)
private TestCallback1 callback1;
private TestCallback2 callback2;
private TestCallback3 callback3;
private MockTransport transport;
private MockCredential credential;
@Override
protected void setUp() {
//fresh callbacks per test; transport and credential are left null
//here because individual tests construct them with test-specific flags
callback1 = new TestCallback1();
callback2 = new TestCallback2();
callback3 = new TestCallback3();
}
/** Mock JSON data class for the first batched request's response payload. */
public static class MockDataClass1 extends GenericJson {
@Key String id;
@Key String kind;
}
/** Mock JSON data class for the second batched request's response payload. */
public static class MockDataClass2 extends GenericJson {
@Key String name;
@Key String number;
}
/**
 * Callback for the first batched request: counts and verifies
 * successful responses, and fails the test on any error response.
 */
private static class TestCallback1
implements BatchCallback<MockDataClass1, GoogleJsonErrorContainer> {
//number of onSuccess invocations, asserted by the tests
int successCalls;
TestCallback1() {}
@Override
public void onSuccess(MockDataClass1 dataClass, HttpHeaders responseHeaders) {
successCalls++;
assertEquals(TEST_ID, dataClass.id);
assertEquals(TEST_KIND, dataClass.kind);
}
@Override
public void onFailure(GoogleJsonErrorContainer e, HttpHeaders responseHeaders) {
fail("Should not be invoked in this test");
}
}
/**
 * Callback for the second batched request: counts successes and
 * failures, verifying the payload or the structured JSON error
 * (domain/reason/message/code) respectively.
 */
private static class TestCallback2
implements BatchCallback<MockDataClass2, GoogleJsonErrorContainer> {
//invocation counters asserted by the tests
int successCalls;
int failureCalls;
TestCallback2() {}
@Override
public void onSuccess(MockDataClass2 dataClass, HttpHeaders responseHeaders) {
successCalls++;
assertEquals(TEST_NAME, dataClass.name);
assertEquals(TEST_NUM, dataClass.number);
}
@Override
public void onFailure(GoogleJsonErrorContainer e, HttpHeaders responseHeaders) {
failureCalls++;
GoogleJsonError error = e.getError();
ErrorInfo errorInfo = error.getErrors().get(0);
assertEquals(ERROR_DOMAIN, errorInfo.getDomain());
assertEquals(ERROR_REASON, errorInfo.getReason());
assertEquals(ERROR_MSG, errorInfo.getMessage());
assertEquals(ERROR_CODE, error.getCode());
assertEquals(ERROR_MSG, error.getMessage());
}
}
/**
 * Void/Void callback used when response content is not parsed:
 * only counts invocations and verifies both payloads are null.
 */
private static class TestCallback3 implements BatchCallback<Void, Void> {
//invocation counters asserted by the tests
int successCalls;
int failureCalls;
TestCallback3() {}
@Override
public void onSuccess(Void dataClass, HttpHeaders responseHeaders) {
successCalls++;
assertNull(dataClass);
}
@Override
public void onFailure(Void e, HttpHeaders responseHeaders) {
failureCalls++;
assertNull(e);
}
}
/**
 * Base class for callback adapters that bridge protobuf-typed batch
 * callbacks to the JSON-typed test callbacks, converting a protobuf
 * {@code ErrorOutput.ErrorBody} into a {@link GoogleJsonErrorContainer}
 * field by field before delegating the failure.
 *
 * @param <InputType> The input type (protobuf message)
 * @param <OutputType> The output type (JSON data class)
 */
private abstract static class TestCallbackBaseAdapter<InputType, OutputType>
implements BatchCallback<InputType, ErrorOutput.ErrorBody> {
protected final BatchCallback<OutputType, GoogleJsonErrorContainer> callback;
protected TestCallbackBaseAdapter(
BatchCallback<OutputType, GoogleJsonErrorContainer> callback) {
this.callback = callback;
}
@Override
public void onFailure(ErrorOutput.ErrorBody e, HttpHeaders responseHeaders) throws IOException {
GoogleJsonErrorContainer errorContainer = new GoogleJsonErrorContainer();
//copy only the fields the proto actually carries (has* guards)
if (e.hasError()) {
ErrorOutput.ErrorProto errorProto = e.getError();
GoogleJsonError error = new GoogleJsonError();
if (errorProto.hasCode()) {
error.setCode(errorProto.getCode());
}
if (errorProto.hasMessage()) {
error.setMessage(errorProto.getMessage());
}
List<ErrorInfo> errorInfos = new ArrayList<ErrorInfo>(errorProto.getErrorsCount());
for (ErrorOutput.IndividualError individualError : errorProto.getErrorsList()) {
ErrorInfo errorInfo = new ErrorInfo();
if (individualError.hasDomain()) {
errorInfo.setDomain(individualError.getDomain());
}
if (individualError.hasMessage()) {
errorInfo.setMessage(individualError.getMessage());
}
if (individualError.hasReason()) {
errorInfo.setReason(individualError.getReason());
}
errorInfos.add(errorInfo);
}
error.setErrors(errorInfos);
errorContainer.setError(error);
}
callback.onFailure(errorContainer, responseHeaders);
}
}
/** Adapter converting a protobuf Class1 success payload for TestCallback1. */
private static class TestCallback1Adapter
extends TestCallbackBaseAdapter<MockData.Class1, MockDataClass1> {
public TestCallback1Adapter(TestCallback1 callback) {
super(callback);
}
@Override
public void onSuccess(MockData.Class1 message, HttpHeaders responseHeaders) throws IOException {
MockDataClass1 dataClass = new MockDataClass1();
//unset proto fields map to null in the JSON data class
dataClass.id = message.hasId() ? message.getId() : null;
dataClass.kind = message.hasKind() ? message.getKind() : null;
callback.onSuccess(dataClass, responseHeaders);
}
}
/** Adapter converting a protobuf Class2 success payload for TestCallback2. */
private static class TestCallback2Adapter
extends TestCallbackBaseAdapter<MockData.Class2, MockDataClass2> {
public TestCallback2Adapter(TestCallback2 callback) {
super(callback);
}
@Override
public void onSuccess(MockData.Class2 message, HttpHeaders responseHeaders) throws IOException {
MockDataClass2 dataClass = new MockDataClass2();
//unset proto fields map to null in the JSON data class
dataClass.name = message.hasName() ? message.getName() : null;
dataClass.number = message.hasNumber() ? message.getNumber() : null;
callback.onSuccess(dataClass, responseHeaders);
}
}
/**
 * Unsuccessful-response handler that simulates re-authentication by
 * flipping the shared transport into returning either success or
 * error content on the retried request.
 */
private static class MockUnsuccessfulResponseHandler implements HttpUnsuccessfulResponseHandler {
MockTransport transport;
//whether the retried request should succeed (true) or error (false)
boolean returnSuccessAuthenticatedContent;
MockUnsuccessfulResponseHandler(
MockTransport transport, boolean returnSuccessAuthenticatedContent) {
this.transport = transport;
this.returnSuccessAuthenticatedContent = returnSuccessAuthenticatedContent;
}
@Override
public boolean handleResponse(
HttpRequest request, HttpResponse response, boolean supportsRetry) {
if (transport.returnErrorAuthenticatedContent) {
// If transport has already been set to return error content do not handle response.
return false;
}
//returning true requests a retry of the (now reconfigured) request
if (returnSuccessAuthenticatedContent) {
transport.returnSuccessAuthenticatedContent = true;
} else {
transport.returnErrorAuthenticatedContent = true;
}
return true;
}
}
private static class MockTransport extends MockHttpTransport {
//scenario flags selecting which canned batch response to serve
final boolean testServerError;
final boolean testAuthenticationError;
//mutable flags flipped by MockUnsuccessfulResponseHandler between calls
boolean returnSuccessAuthenticatedContent;
boolean returnErrorAuthenticatedContent;
final boolean testRedirect;
final boolean testBinary;
final boolean testMissingLength;
//request counter and threshold used by the redirect scenario
int actualCalls;
int callsBeforeSuccess;
//configures which canned scenario this transport serves; the
//callsBeforeSuccess threshold is set separately by tests that need it
MockTransport(
boolean testServerError,
boolean testAuthenticationError,
boolean testRedirect,
boolean testBinary,
boolean testMissingLength) {
this.testServerError = testServerError;
this.testAuthenticationError = testAuthenticationError;
this.testRedirect = testRedirect;
this.testBinary = testBinary;
this.testMissingLength = testMissingLength;
}
@Override
public LowLevelHttpRequest buildRequest(String name, String url) {
actualCalls++;
return new MockLowLevelHttpRequest() {
@Override
public LowLevelHttpResponse execute() throws IOException {
MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
response.setStatusCode(200);
response.addHeader("Content-Type", "multipart/mixed; boundary=" + RESPONSE_BOUNDARY);
String contentType =
testBinary ? "application/x-protobuf" : "application/json; charset=UTF-8";
byte[] content1 =
testBinary
? MockData.Class1.newBuilder()
.setId(TEST_ID)
.setKind(TEST_KIND)
.build()
.toByteArray()
: utf8Encode(
"{\n \"id\": \""
+ TEST_ID
+ "\",\n \"kind\": \""
+ TEST_KIND.replace("\n", "\\n")
+ "\"\n}");
byte[] content2 =
testBinary
? MockData.Class2.newBuilder()
.setName(TEST_NAME)
.setNumber(TEST_NUM)
.build()
.toByteArray()
: utf8Encode(
"{\"name\": \"" + TEST_NAME + "\", \"number\": \"" + TEST_NUM + "\"}");
byte[] errorContent =
testBinary
? ErrorOutput.ErrorBody.newBuilder()
.setError(
ErrorOutput.ErrorProto.newBuilder()
.setCode(ERROR_CODE)
.setMessage(ERROR_MSG)
.addErrors(
ErrorOutput.IndividualError.newBuilder()
.setDomain(ERROR_DOMAIN)
.setReason(ERROR_REASON)
.setMessage(ERROR_MSG)))
.build()
.toByteArray()
: utf8Encode(
"{\"error\": { \"errors\": [{\"domain\": \""
+ ERROR_DOMAIN
+ "\","
+ "\"reason\": \""
+ ERROR_REASON
+ "\", \"message\": \""
+ ERROR_MSG
+ "\"}],"
+ "\"code\": "
+ ERROR_CODE
+ ", \"message\": \""
+ ERROR_MSG
+ "\"}}");
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
Writer responseContent = new OutputStreamWriter(outputStream, "ISO-8859-1");
if (returnSuccessAuthenticatedContent || (testRedirect && actualCalls > 1)) {
if (returnSuccessAuthenticatedContent || actualCalls == callsBeforeSuccess) {
responseContent
.append("--" + RESPONSE_BOUNDARY + "\n")
.append("Content-Type: application/http\n")
.append("Content-Transfer-Encoding: binary\n")
.append("Content-ID: response-1\n\n")
.append("HTTP/1.1 200 OK\n")
.append("Content-Type: " + contentType + "\n");
if (!testMissingLength) {
responseContent.append("Content-Length: " + content2.length + "\n");
}
responseContent.append("\n");
responseContent.flush();
outputStream.write(content2);
responseContent.append("\n--" + RESPONSE_BOUNDARY + "--\n\n");
} else {
responseContent
.append("--" + RESPONSE_BOUNDARY + "\n")
.append("Content-Type: application/http\n")
.append("Content-Transfer-Encoding: binary\n")
.append("Content-ID: response-1\n\n")
.append("HTTP/1.1 " + ERROR_CODE + " Not Found\n")
.append("Content-Type: " + contentType + "\n");
if (!testMissingLength) {
responseContent.append("Content-Length: " + errorContent.length + "\n");
}
responseContent.append("\n");
responseContent.flush();
outputStream.write(errorContent);
responseContent.append("\n--" + RESPONSE_BOUNDARY + "--\n\n");
}
} else if (returnErrorAuthenticatedContent) {
responseContent
.append("Content-Type: application/http\n")
.append("Content-Transfer-Encoding: binary\n")
.append("Content-ID: response-1\n\n");
responseContent
.append("HTTP/1.1 " + ERROR_CODE + " Not Found\n")
.append("Content-Type: " + contentType + "\n");
if (!testMissingLength) {
responseContent.append("Content-Length: " + errorContent.length + "\n");
}
responseContent.append("\n");
responseContent.flush();
outputStream.write(errorContent);
responseContent.append("\n--" + RESPONSE_BOUNDARY + "--\n\n");
} else {
responseContent
.append("--" + RESPONSE_BOUNDARY + "\n")
.append("Content-Type: application/http\n")
.append("Content-Transfer-Encoding: binary\n")
.append("Content-ID: response-1\n\n")
.append("HTTP/1.1 200 OK\n")
.append("Content-Type: " + contentType + "\n");
if (!testMissingLength) {
responseContent.append("Content-Length: " + content1.length + "\n");
}
responseContent.append("\n");
responseContent.flush();
outputStream.write(content1);
responseContent
.append("\n--" + RESPONSE_BOUNDARY + "\n")
.append("Content-Type: application/http\n")
.append("Content-Transfer-Encoding: binary\n")
.append("Content-ID: response-2\n\n");
if (testServerError) {
responseContent
.append("HTTP/1.1 " + ERROR_CODE + " Not Found\n")
.append("Content-Type: " + contentType + "\n");
if (!testMissingLength) {
responseContent.append("Content-Length: " + errorContent.length + "\n");
}
responseContent.append("\n");
responseContent.flush();
outputStream.write(errorContent);
responseContent.append("\n--" + RESPONSE_BOUNDARY + "--\n\n");
} else if (testAuthenticationError) {
responseContent
.append("HTTP/1.1 401 Unauthorized\n")
.append("Content-Type: application/json; charset=UTF-8\n\n")
.append("--" + RESPONSE_BOUNDARY + "--\n\n");
} else if (testRedirect && actualCalls == 1) {
responseContent
.append("HTTP/1.1 301 MovedPermanently\n")
.append("Content-Type: " + contentType + "\n")
.append("Location: http://redirect/location\n\n")
.append("--" + RESPONSE_BOUNDARY + "--\n\n");
} else {
responseContent
.append("HTTP/1.1 200 OK\n")
.append("Content-Type: " + contentType + "\n");
if (!testMissingLength) {
responseContent.append("Content-Length: " + content2.length + "\n");
}
responseContent.append("\n");
responseContent.flush();
outputStream.write(content2);
responseContent.append("\n--" + RESPONSE_BOUNDARY + "--\n\n");
}
}
responseContent.flush();
response.setContent(outputStream.toByteArray());
return response;
}
// Short-hand to encode a String as a UTF-8 byte array
private byte[] utf8Encode(String string) {
return Charsets.UTF_8.encode(string).array();
}
};
}
}
/** Fake credential recording whether it initialized a request and intercepted its execution. */
private static class MockCredential implements HttpRequestInitializer, HttpExecuteInterceptor {
  boolean initializerCalled;
  boolean interceptorCalled;

  MockCredential() {}

  @Override
  public void initialize(HttpRequest request) {
    initializerCalled = true;
    // Register ourselves so intercept() fires when the request executes.
    request.setInterceptor(this);
  }

  @Override
  public void intercept(HttpRequest request) {
    interceptorCalled = true;
  }
}
/**
 * Builds a {@link BatchRequest} with two queued requests against a {@link MockTransport}
 * configured for the requested scenario. Stores the transport and credential in the
 * corresponding instance fields so tests can assert on them afterwards.
 */
private BatchRequest getBatchPopulatedWithRequests(
    boolean testServerError,
    boolean testAuthenticationError,
    boolean returnSuccessAuthenticatedContent,
    boolean testRedirect,
    boolean testBinary,
    boolean testMissingLength)
    throws IOException {
  transport =
      new MockTransport(
          testServerError, testAuthenticationError, testRedirect, testBinary, testMissingLength);
  MockGoogleClient client =
      new MockGoogleClient.Builder(transport, ROOT_URL, SERVICE_PATH, null, null)
          .setApplicationName("Test Application")
          .build();
  credential = new MockCredential();
  // Proto payloads need a proto parser; otherwise parse as JSON.
  ObjectParser parser =
      testBinary ? new ProtoObjectParser() : new JsonObjectParser(new GsonFactory());
  HttpRequest first =
      new MockGoogleClientRequest<String>(client, METHOD1, URI_TEMPLATE1, null, String.class)
          .buildHttpRequest();
  first.setParser(parser);
  HttpRequest second =
      new MockGoogleClientRequest<String>(client, METHOD2, URI_TEMPLATE2, null, String.class)
          .buildHttpRequest();
  second.setParser(parser);
  if (testAuthenticationError) {
    // Only the second request takes part in the auth-retry scenario.
    second.setUnsuccessfulResponseHandler(
        new MockUnsuccessfulResponseHandler(transport, returnSuccessAuthenticatedContent));
  }
  BatchRequest batch =
      new BatchRequest(transport, credential).setBatchUrl(new GenericUrl(TEST_BATCH_URL));
  if (testBinary) {
    batch.queue(
        first,
        MockData.Class1.class,
        ErrorOutput.ErrorBody.class,
        new TestCallback1Adapter(callback1));
    batch.queue(
        second,
        MockData.Class2.class,
        ErrorOutput.ErrorBody.class,
        new TestCallback2Adapter(callback2));
  } else {
    batch.queue(first, MockDataClass1.class, GoogleJsonErrorContainer.class, callback1);
    batch.queue(second, MockDataClass2.class, GoogleJsonErrorContainer.class, callback2);
  }
  return batch;
}
public void testQueueDatastructures() throws Exception {
  // Queue two plain JSON requests and inspect the internal request list directly.
  BatchRequest batch = getBatchPopulatedWithRequests(false, false, false, false, false, false);
  List<RequestInfo<?, ?>> infos = batch.requestInfos;
  // Both entries are present, carrying the expected data classes and callbacks.
  assertEquals(2, infos.size());
  assertEquals(MockDataClass1.class, infos.get(0).dataClass);
  assertEquals(callback1, infos.get(0).callback);
  assertEquals(MockDataClass2.class, infos.get(1).dataClass);
  assertEquals(callback2, infos.get(1).callback);
  // The queued HTTP requests have the expected URLs and methods.
  assertEquals(
      ROOT_URL + SERVICE_PATH + URI_TEMPLATE1, infos.get(0).request.getUrl().build());
  assertEquals(
      ROOT_URL + SERVICE_PATH + URI_TEMPLATE2, infos.get(1).request.getUrl().build());
  assertEquals(METHOD1, infos.get(0).request.getRequestMethod());
  assertEquals(METHOD2, infos.get(1).request.getRequestMethod());
}
public void testExecute() throws IOException {
  // Happy path: both queued requests succeed.
  BatchRequest batch = getBatchPopulatedWithRequests(false, false, false, false, false, false);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(1, callback2.successCalls);
  assertEquals(0, callback2.failureCalls);
  // The queue must be drained after execution.
  assertTrue(batch.requestInfos.isEmpty());
}
public void testExecuteWithError() throws IOException {
  // Second part of the batch response is a server error.
  BatchRequest batch = getBatchPopulatedWithRequests(true, false, false, false, false, false);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(0, callback2.successCalls);
  assertEquals(1, callback2.failureCalls);
  // The queue must still be drained after execution.
  assertTrue(batch.requestInfos.isEmpty());
  // No retry: a single HTTP round trip.
  assertEquals(1, transport.actualCalls);
}
public void testExecuteWithVoidCallback() throws Exception {
  // Success path: both the typed callback and the Void callback see onSuccess.
  subTestExecuteWithVoidCallback(false);
  assertEquals(1, callback1.successCalls);
  assertEquals(1, callback3.successCalls);
  assertEquals(0, callback3.failureCalls);
}
public void testExecuteWithVoidCallbackError() throws Exception {
  // Error path: the Void callback sees onFailure while the first callback still succeeds.
  subTestExecuteWithVoidCallback(true);
  assertEquals(1, callback1.successCalls);
  assertEquals(0, callback3.successCalls);
  assertEquals(1, callback3.failureCalls);
}
/**
 * Executes a two-request batch where the second response is parsed as {@code Void}
 * (no data/error class), optionally with the second part failing server-side.
 */
public void subTestExecuteWithVoidCallback(boolean testServerError) throws IOException {
  MockTransport localTransport = new MockTransport(testServerError, false, false, false, false);
  MockGoogleClient client =
      new MockGoogleClient.Builder(localTransport, ROOT_URL, SERVICE_PATH, null, null)
          .setApplicationName("Test Application")
          .build();
  ObjectParser parser = new JsonObjectParser(new GsonFactory());
  HttpRequest first =
      new MockGoogleClientRequest<String>(client, METHOD1, URI_TEMPLATE1, null, String.class)
          .buildHttpRequest();
  first.setParser(parser);
  HttpRequest second =
      new MockGoogleClientRequest<String>(client, METHOD2, URI_TEMPLATE2, null, String.class)
          .buildHttpRequest();
  second.setParser(parser);
  BatchRequest batch =
      new BatchRequest(localTransport, null).setBatchUrl(new GenericUrl(TEST_BATCH_URL));
  batch.queue(first, MockDataClass1.class, GoogleJsonErrorContainer.class, callback1);
  batch.queue(second, Void.class, Void.class, callback3);
  batch.execute();
  // The whole batch is served by one HTTP round trip.
  assertEquals(1, localTransport.actualCalls);
}
public void testExecuteWithAuthenticationErrorThenSuccessCallback() throws Exception {
  // 401 on the second part; the retry is configured to succeed.
  BatchRequest batch = getBatchPopulatedWithRequests(false, true, true, false, false, false);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(1, callback2.successCalls);
  assertEquals(0, callback2.failureCalls);
  // One original call plus one retry.
  assertEquals(2, transport.actualCalls);
  assertTrue(batch.requestInfos.isEmpty());
}
public void testExecuteWithAuthenticationErrorThenErrorCallback() throws Exception {
  // 401 on the second part; the retry is configured to fail again.
  BatchRequest batch = getBatchPopulatedWithRequests(false, true, false, false, false, false);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(0, callback2.successCalls);
  assertEquals(1, callback2.failureCalls);
  // One original call plus one retry.
  assertEquals(2, transport.actualCalls);
  assertTrue(batch.requestInfos.isEmpty());
}
public void testInterceptor() throws Exception {
  BatchRequest batch = getBatchPopulatedWithRequests(true, false, false, false, false, false);
  batch.execute();
  // The batch-level credential must both initialize and intercept the outgoing request.
  assertTrue(credential.initializerCalled);
  assertTrue(credential.interceptorCalled);
}
public void testRedirect() throws Exception {
  // First call gets a 301; the second call serves the real content.
  BatchRequest batch = getBatchPopulatedWithRequests(false, false, false, true, false, false);
  transport.callsBeforeSuccess = 2;
  batch.execute();
  assertEquals(2, transport.actualCalls);
  assertTrue(batch.requestInfos.isEmpty());
}
public void testExecute_checkWriteTo() throws Exception {
  String request1Method = HttpMethods.POST;
  String request1Url = "http://test/dummy/url1";
  String request1ContentType = "application/json";
  String request1Content = "{\"data\":{\"foo\":{\"v1\":{}}}}";
  String request2Method = HttpMethods.GET;
  String request2Url = "http://test/dummy/url2";
  // Expected serialized parts. MIME content boundaries are not reproducible, so the
  // check only requires each part to appear somewhere in the written output.
  String expected1 =
      "Content-Length: 118\r\n"
          + "Content-Type: application/http\r\n"
          + "content-id: 1\r\n"
          + "content-transfer-encoding: binary\r\n"
          + "\r\n"
          + "POST http://test/dummy/url1 HTTP/1.1\r\n"
          + "Content-Length: 26\r\n"
          + "Content-Type: "
          + request1ContentType
          + "\r\n"
          + "\r\n"
          + request1Content
          + "\r\n"
          + "--__END_OF_PART__";
  String expected2 =
      "Content-Length: 39\r\n"
          + "Content-Type: application/http\r\n"
          + "content-id: 2\r\n"
          + "content-transfer-encoding: binary\r\n"
          + "\r\n"
          + "GET http://test/dummy/url2 HTTP/1.1\r\n"
          + "\r\n"
          + "\r\n"
          + "--__END_OF_PART__";
  MockHttpTransport transport = new MockHttpTransport();
  HttpRequest request1 =
      transport
          .createRequestFactory()
          .buildRequest(
              request1Method,
              new GenericUrl(request1Url),
              new ByteArrayContent(request1ContentType, request1Content.getBytes(UTF_8)));
  HttpRequest request2 =
      transport
          .createRequestFactory()
          .buildRequest(request2Method, new GenericUrl(request2Url), null);
  subtestExecute_checkWriteTo(expected1, expected2, request1, request2);
}
/**
 * Executes a batch over a transport that intercepts the outgoing multipart body, asserts
 * that it contains the two expected serialized parts, and then feeds back a minimal
 * successful multipart response so {@code execute()} completes normally.
 */
private void subtestExecute_checkWriteTo(
    final String part1, final String part2, HttpRequest... requests) throws IOException {
  MockHttpTransport transport =
      new MockHttpTransport() {
        @Override
        public LowLevelHttpRequest buildRequest(String method, String url) {
          return new MockLowLevelHttpRequest(url) {
            @Override
            public LowLevelHttpResponse execute() throws IOException {
              // The batch must be sent as multipart/mixed with the well-known boundary.
              assertTrue(
                  getContentType().startsWith("multipart/mixed; boundary=__END_OF_PART__"));
              // Capture the serialized request body and check both parts are present.
              ByteArrayOutputStream out = new ByteArrayOutputStream();
              getStreamingContent().writeTo(out);
              String actual = out.toString("UTF-8");
              assertTrue(actual + "\n does not contain \n" + part1, actual.contains(part1));
              assertTrue(actual.contains(part2));
              // Reply with a single successful JSON part so execute() finishes cleanly.
              MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
              response.setStatusCode(200);
              response.addHeader(
                  "Content-Type", "multipart/mixed; boundary=" + RESPONSE_BOUNDARY);
              String content2 =
                  "{\"name\": \"" + TEST_NAME + "\", \"number\": \"" + TEST_NUM + "\"}";
              StringBuilder responseContent = new StringBuilder();
              responseContent
                  .append("--" + RESPONSE_BOUNDARY + "\n")
                  .append("Content-Type: application/http\n")
                  .append("Content-Transfer-Encoding: binary\n")
                  .append("Content-ID: response-1\n\n")
                  .append("HTTP/1.1 200 OK\n")
                  .append("Content-Type: application/json; charset=UTF-8\n")
                  .append("Content-Length: " + content2.length() + "\n\n")
                  .append(content2 + "\n\n")
                  .append("--" + RESPONSE_BOUNDARY + "--\n\n");
              response.setContent(responseContent.toString());
              return response;
            }
          };
        }
      };
  BatchRequest batchRequest = new BatchRequest(transport, null);
  // No-op callback: this test only cares about the serialized request body.
  BatchCallback<Void, Void> callback =
      new BatchCallback<Void, Void>() {
        @Override
        public void onSuccess(Void t, HttpHeaders responseHeaders) {}

        @Override
        public void onFailure(Void e, HttpHeaders responseHeaders) {}
      };
  for (HttpRequest request : requests) {
    batchRequest.queue(request, Void.class, Void.class, callback);
  }
  batchRequest.execute();
}
public void testExecute_checkWriteToNoHeaders() throws IOException {
  MockHttpTransport transport = new MockHttpTransport();
  // A POST whose content has unknown length, no content type, and writes nothing,
  // so the serialized part carries no inner entity headers.
  HttpRequest request =
      transport
          .createRequestFactory()
          .buildPostRequest(
              HttpTesting.SIMPLE_GENERIC_URL,
              new HttpContent() {
                @Override
                public long getLength() {
                  return -1;
                }

                @Override
                public String getType() {
                  return null;
                }

                @Override
                public void writeTo(OutputStream out) {}

                @Override
                public boolean retrySupported() {
                  return true;
                }
              });
  String expected =
      "Content-Length: 36\r\n"
          + "Content-Type: application/http\r\n"
          + "content-id: 1\r\n"
          + "content-transfer-encoding: binary\r\n"
          + "\r\n"
          + "POST http://google.com/ HTTP/1.1\r\n"
          + "\r\n"
          + "\r\n"
          + "--__END_OF_PART__";
  subtestExecute_checkWriteTo(expected, expected, request);
}
public void testProtoExecute() throws IOException {
  // Protobuf payloads, both parts successful.
  BatchRequest batch = getBatchPopulatedWithRequests(false, false, false, false, true, false);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(1, callback2.successCalls);
  assertEquals(0, callback2.failureCalls);
  assertTrue(batch.requestInfos.isEmpty());
}
public void testProtoExecuteWithError() throws IOException {
  // Protobuf payloads with a server error on the second part.
  BatchRequest batch = getBatchPopulatedWithRequests(true, false, false, false, true, false);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(0, callback2.successCalls);
  assertEquals(1, callback2.failureCalls);
  assertTrue(batch.requestInfos.isEmpty());
  // No retry: a single HTTP round trip.
  assertEquals(1, transport.actualCalls);
}
public void testProtoExecuteWithoutLength() throws IOException {
  // Protobuf payloads whose inner responses omit Content-Length.
  BatchRequest batch = getBatchPopulatedWithRequests(false, false, false, false, true, true);
  batch.execute();
  assertEquals(1, callback1.successCalls);
  assertEquals(1, callback2.successCalls);
  assertEquals(0, callback2.failureCalls);
  assertTrue(batch.requestInfos.isEmpty());
}
}
// ==== googleapis/google-cloud-java : java-dataproc/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/HttpJsonAutoscalingPolicyServiceStub.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataproc.v1.stub;
import static com.google.cloud.dataproc.v1.AutoscalingPolicyServiceClient.ListAutoscalingPoliciesPagedResponse;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dataproc.v1.AutoscalingPolicy;
import com.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest;
import com.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest;
import com.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest;
import com.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest;
import com.google.cloud.dataproc.v1.ListAutoscalingPoliciesResponse;
import com.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the AutoscalingPolicyService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonAutoscalingPolicyServiceStub extends AutoscalingPolicyServiceStub {
private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build();
// REST mapping for CreateAutoscalingPolicy: POST to the locations/* collection (with a
// regions/* alternate path); the AutoscalingPolicy is sent as the request body.
private static final ApiMethodDescriptor<CreateAutoscalingPolicyRequest, AutoscalingPolicy>
    createAutoscalingPolicyMethodDescriptor =
        ApiMethodDescriptor.<CreateAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
            .setFullMethodName(
                "google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<CreateAutoscalingPolicyRequest>newBuilder()
                    .setPath(
                        "/v1/{parent=projects/*/locations/*}/autoscalingPolicies",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<CreateAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "parent", request.getParent());
                          return fields;
                        })
                    .setAdditionalPaths("/v1/{parent=projects/*/regions/*}/autoscalingPolicies")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<CreateAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            ProtoRestSerializer.create()
                                .toBody("policy", request.getPolicy(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<AutoscalingPolicy>newBuilder()
                    .setDefaultInstance(AutoscalingPolicy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for UpdateAutoscalingPolicy: PUT addressed by the policy's own resource
// name (locations/* primary path, regions/* alternate); full policy in the body.
private static final ApiMethodDescriptor<UpdateAutoscalingPolicyRequest, AutoscalingPolicy>
    updateAutoscalingPolicyMethodDescriptor =
        ApiMethodDescriptor.<UpdateAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
            .setFullMethodName(
                "google.cloud.dataproc.v1.AutoscalingPolicyService/UpdateAutoscalingPolicy")
            .setHttpMethod("PUT")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<UpdateAutoscalingPolicyRequest>newBuilder()
                    .setPath(
                        "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<UpdateAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(
                              fields, "policy.name", request.getPolicy().getName());
                          return fields;
                        })
                    .setAdditionalPaths(
                        "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<UpdateAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            ProtoRestSerializer.create()
                                .toBody("policy", request.getPolicy(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<AutoscalingPolicy>newBuilder()
                    .setDefaultInstance(AutoscalingPolicy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for GetAutoscalingPolicy: GET by resource name; no request body.
private static final ApiMethodDescriptor<GetAutoscalingPolicyRequest, AutoscalingPolicy>
    getAutoscalingPolicyMethodDescriptor =
        ApiMethodDescriptor.<GetAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
            .setFullMethodName(
                "google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<GetAutoscalingPolicyRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<GetAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setAdditionalPaths("/v1/{name=projects/*/regions/*/autoscalingPolicies/*}")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<GetAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<AutoscalingPolicy>newBuilder()
                    .setDefaultInstance(AutoscalingPolicy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for ListAutoscalingPolicies: GET on the parent collection with
// pageSize/pageToken carried as query parameters; no request body.
private static final ApiMethodDescriptor<
        ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse>
    listAutoscalingPoliciesMethodDescriptor =
        ApiMethodDescriptor
            .<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse>newBuilder()
            .setFullMethodName(
                "google.cloud.dataproc.v1.AutoscalingPolicyService/ListAutoscalingPolicies")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<ListAutoscalingPoliciesRequest>newBuilder()
                    .setPath(
                        "/v1/{parent=projects/*/locations/*}/autoscalingPolicies",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<ListAutoscalingPoliciesRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "parent", request.getParent());
                          return fields;
                        })
                    .setAdditionalPaths("/v1/{parent=projects/*/regions/*}/autoscalingPolicies")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<ListAutoscalingPoliciesRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                          serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<ListAutoscalingPoliciesResponse>newBuilder()
                    .setDefaultInstance(ListAutoscalingPoliciesResponse.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for DeleteAutoscalingPolicy: DELETE by resource name; empty response.
private static final ApiMethodDescriptor<DeleteAutoscalingPolicyRequest, Empty>
    deleteAutoscalingPolicyMethodDescriptor =
        ApiMethodDescriptor.<DeleteAutoscalingPolicyRequest, Empty>newBuilder()
            .setFullMethodName(
                "google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy")
            .setHttpMethod("DELETE")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<DeleteAutoscalingPolicyRequest>newBuilder()
                    .setPath(
                        "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<DeleteAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "name", request.getName());
                          return fields;
                        })
                    .setAdditionalPaths("/v1/{name=projects/*/regions/*/autoscalingPolicies/*}")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<DeleteAutoscalingPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Empty>newBuilder()
                    .setDefaultInstance(Empty.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for IAM SetIamPolicy: POST :setIamPolicy on any of the Dataproc resource
// types; the body is the request minus the resource field (already in the path).
private static final ApiMethodDescriptor<SetIamPolicyRequest, Policy>
    setIamPolicyMethodDescriptor =
        ApiMethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
            .setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<SetIamPolicyRequest>newBuilder()
                    .setPath(
                        "/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<SetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "resource", request.getResource());
                          return fields;
                        })
                    .setAdditionalPaths(
                        "/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy",
                        "/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy",
                        "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy",
                        "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy",
                        "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy",
                        "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<SetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            ProtoRestSerializer.create()
                                .toBody("*", request.toBuilder().clearResource().build(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Policy>newBuilder()
                    .setDefaultInstance(Policy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// REST mapping for IAM GetIamPolicy: POST :getIamPolicy on any of the Dataproc resource
// types; the body is the request minus the resource field (already in the path).
private static final ApiMethodDescriptor<GetIamPolicyRequest, Policy>
    getIamPolicyMethodDescriptor =
        ApiMethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
            .setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy")
            .setHttpMethod("POST")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<GetIamPolicyRequest>newBuilder()
                    .setPath(
                        "/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<GetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "resource", request.getResource());
                          return fields;
                        })
                    .setAdditionalPaths(
                        "/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy",
                        "/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy",
                        "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy",
                        "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy",
                        "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy",
                        "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy")
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<GetIamPolicyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                          return fields;
                        })
                    .setRequestBodyExtractor(
                        request ->
                            ProtoRestSerializer.create()
                                .toBody("*", request.toBuilder().clearResource().build(), true))
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Policy>newBuilder()
                    .setDefaultInstance(Policy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
  // HTTP/JSON descriptor for google.iam.v1.IAMPolicy/TestIamPermissions; mirrors the
  // GetIamPolicy descriptor above: `resource` goes into the URL, the rest into the body.
  private static final ApiMethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          ApiMethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<TestIamPermissionsRequest>newBuilder()
                      .setPath(
                          "/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                                ProtoRestSerializer.create();
                            // Bind the proto `resource` field to the {resource=...} path variable.
                            serializer.putPathParam(fields, "resource", request.getResource());
                            return fields;
                          })
                      .setAdditionalPaths(
                          "/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions",
                          "/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions",
                          "/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions",
                          "/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions",
                          "/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions",
                          "/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions")
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<TestIamPermissionsRequest> serializer =
                                ProtoRestSerializer.create();
                            // Ask the transcoding layer for JSON with enums encoded as ints.
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  // `resource` already travels in the URL; strip it from the body.
                                  .toBody("*", request.toBuilder().clearResource().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<TestIamPermissionsResponse>newBuilder()
                      .setDefaultInstance(TestIamPermissionsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // One transport-level callable per RPC exposed by this stub; all are wired up once in
  // the constructor below and handed out through the corresponding *Callable() accessors.
  private final UnaryCallable<CreateAutoscalingPolicyRequest, AutoscalingPolicy>
      createAutoscalingPolicyCallable;
  private final UnaryCallable<UpdateAutoscalingPolicyRequest, AutoscalingPolicy>
      updateAutoscalingPolicyCallable;
  private final UnaryCallable<GetAutoscalingPolicyRequest, AutoscalingPolicy>
      getAutoscalingPolicyCallable;
  private final UnaryCallable<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse>
      listAutoscalingPoliciesCallable;
  private final UnaryCallable<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesPagedResponse>
      listAutoscalingPoliciesPagedCallable;
  private final UnaryCallable<DeleteAutoscalingPolicyRequest, Empty>
      deleteAutoscalingPolicyCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;
  // Aggregates every closeable resource owned by this stub (channels, executors, ...).
  private final BackgroundResource backgroundResources;
  // Factory that combines transport settings and call settings into callables.
  private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonAutoscalingPolicyServiceStub create(
AutoscalingPolicyServiceStubSettings settings) throws IOException {
return new HttpJsonAutoscalingPolicyServiceStub(settings, ClientContext.create(settings));
}
public static final HttpJsonAutoscalingPolicyServiceStub create(ClientContext clientContext)
throws IOException {
return new HttpJsonAutoscalingPolicyServiceStub(
AutoscalingPolicyServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
}
public static final HttpJsonAutoscalingPolicyServiceStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonAutoscalingPolicyServiceStub(
AutoscalingPolicyServiceStubSettings.newHttpJsonBuilder().build(),
clientContext,
callableFactory);
}
  /**
   * Constructs an instance of HttpJsonAutoscalingPolicyServiceStub, using the given settings. This
   * is protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonAutoscalingPolicyServiceStub(
      AutoscalingPolicyServiceStubSettings settings, ClientContext clientContext)
      throws IOException {
    // Delegates to the main constructor using the default HTTP/JSON callable factory.
    this(settings, clientContext, new HttpJsonAutoscalingPolicyServiceCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonAutoscalingPolicyServiceStub, using the given settings. This
   * is protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonAutoscalingPolicyServiceStub(
      AutoscalingPolicyServiceStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // For each RPC, build transport settings that pair the static method descriptor with the
    // shared type registry and a params extractor; the extractor picks the request fields that
    // become implicit routing headers (x-goog-request-params) for the backend.
    HttpJsonCallSettings<CreateAutoscalingPolicyRequest, AutoscalingPolicy>
        createAutoscalingPolicyTransportSettings =
            HttpJsonCallSettings.<CreateAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
                .setMethodDescriptor(createAutoscalingPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<UpdateAutoscalingPolicyRequest, AutoscalingPolicy>
        updateAutoscalingPolicyTransportSettings =
            HttpJsonCallSettings.<UpdateAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
                .setMethodDescriptor(updateAutoscalingPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      // Update routes on the policy's own resource name, not a parent.
                      builder.add("policy.name", String.valueOf(request.getPolicy().getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<GetAutoscalingPolicyRequest, AutoscalingPolicy>
        getAutoscalingPolicyTransportSettings =
            HttpJsonCallSettings.<GetAutoscalingPolicyRequest, AutoscalingPolicy>newBuilder()
                .setMethodDescriptor(getAutoscalingPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse>
        listAutoscalingPoliciesTransportSettings =
            HttpJsonCallSettings
                .<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse>newBuilder()
                .setMethodDescriptor(listAutoscalingPoliciesMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<DeleteAutoscalingPolicyRequest, Empty>
        deleteAutoscalingPolicyTransportSettings =
            HttpJsonCallSettings.<DeleteAutoscalingPolicyRequest, Empty>newBuilder()
                .setMethodDescriptor(deleteAutoscalingPolicyMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("name", String.valueOf(request.getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        HttpJsonCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        HttpJsonCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            HttpJsonCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("resource", String.valueOf(request.getResource()));
                      return builder.build();
                    })
                .build();
    // Combine each transport settings object with the user-configured call settings
    // (retries, timeouts) to produce the final callables.
    this.createAutoscalingPolicyCallable =
        callableFactory.createUnaryCallable(
            createAutoscalingPolicyTransportSettings,
            settings.createAutoscalingPolicySettings(),
            clientContext);
    this.updateAutoscalingPolicyCallable =
        callableFactory.createUnaryCallable(
            updateAutoscalingPolicyTransportSettings,
            settings.updateAutoscalingPolicySettings(),
            clientContext);
    this.getAutoscalingPolicyCallable =
        callableFactory.createUnaryCallable(
            getAutoscalingPolicyTransportSettings,
            settings.getAutoscalingPolicySettings(),
            clientContext);
    this.listAutoscalingPoliciesCallable =
        callableFactory.createUnaryCallable(
            listAutoscalingPoliciesTransportSettings,
            settings.listAutoscalingPoliciesSettings(),
            clientContext);
    // The paged variant shares the plain list transport settings.
    this.listAutoscalingPoliciesPagedCallable =
        callableFactory.createPagedCallable(
            listAutoscalingPoliciesTransportSettings,
            settings.listAutoscalingPoliciesSettings(),
            clientContext);
    this.deleteAutoscalingPolicyCallable =
        callableFactory.createUnaryCallable(
            deleteAutoscalingPolicyTransportSettings,
            settings.deleteAutoscalingPolicySettings(),
            clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);
    // Take ownership of the context's background resources so close() tears them down.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(createAutoscalingPolicyMethodDescriptor);
methodDescriptors.add(updateAutoscalingPolicyMethodDescriptor);
methodDescriptors.add(getAutoscalingPolicyMethodDescriptor);
methodDescriptors.add(listAutoscalingPoliciesMethodDescriptor);
methodDescriptors.add(deleteAutoscalingPolicyMethodDescriptor);
methodDescriptors.add(setIamPolicyMethodDescriptor);
methodDescriptors.add(getIamPolicyMethodDescriptor);
methodDescriptors.add(testIamPermissionsMethodDescriptor);
return methodDescriptors;
}
  // Plain accessors for the callables wired up in the constructor; the generated client
  // classes call these rather than invoking the transport directly.
  @Override
  public UnaryCallable<CreateAutoscalingPolicyRequest, AutoscalingPolicy>
      createAutoscalingPolicyCallable() {
    return createAutoscalingPolicyCallable;
  }

  @Override
  public UnaryCallable<UpdateAutoscalingPolicyRequest, AutoscalingPolicy>
      updateAutoscalingPolicyCallable() {
    return updateAutoscalingPolicyCallable;
  }

  @Override
  public UnaryCallable<GetAutoscalingPolicyRequest, AutoscalingPolicy>
      getAutoscalingPolicyCallable() {
    return getAutoscalingPolicyCallable;
  }

  @Override
  public UnaryCallable<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse>
      listAutoscalingPoliciesCallable() {
    return listAutoscalingPoliciesCallable;
  }

  @Override
  public UnaryCallable<ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesPagedResponse>
      listAutoscalingPoliciesPagedCallable() {
    return listAutoscalingPoliciesPagedCallable;
  }

  @Override
  public UnaryCallable<DeleteAutoscalingPolicyRequest, Empty> deleteAutoscalingPolicyCallable() {
    return deleteAutoscalingPolicyCallable;
  }

  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }

  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }

  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
  // Lifecycle management simply delegates to the aggregated background resources.
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
googleapis/google-cloud-java | 35,279 | java-assured-workloads/proto-google-cloud-assured-workloads-v1/src/main/java/com/google/cloud/assuredworkloads/v1/UpdateWorkloadRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/assuredworkloads/v1/assuredworkloads.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.assuredworkloads.v1;
/**
*
*
* <pre>
* Request for Updating a workload.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1.UpdateWorkloadRequest}
*/
public final class UpdateWorkloadRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1.UpdateWorkloadRequest)
UpdateWorkloadRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateWorkloadRequest.newBuilder() to construct.
private UpdateWorkloadRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateWorkloadRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateWorkloadRequest();
}
  // Returns the proto descriptor for this message type, generated from
  // google/cloud/assuredworkloads/v1/assuredworkloads.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
        .internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_descriptor;
  }

  // Maps descriptor fields to the generated accessors for reflection support.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
        .internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.class,
            com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.Builder.class);
  }
  // Presence bits: bit 0 tracks `workload`, bit 1 tracks `update_mask`.
  private int bitField0_;
  public static final int WORKLOAD_FIELD_NUMBER = 1;
  private com.google.cloud.assuredworkloads.v1.Workload workload_;
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workload field is set.
*/
@java.lang.Override
public boolean hasWorkload() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workload.
*/
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.Workload getWorkload() {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder getWorkloadOrBuilder() {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only the fields whose presence bits are set, in field-number order.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getWorkload());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Cached after first computation; -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWorkload());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest)) {
return super.equals(obj);
}
com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest other =
(com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest) obj;
if (hasWorkload() != other.hasWorkload()) return false;
if (hasWorkload()) {
if (!getWorkload().equals(other.getWorkload())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not computed" sentinel (never produced by the mix below).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only set fields contribute, keeping hashCode consistent with equals().
    if (hasWorkload()) {
      hash = (37 * hash) + WORKLOAD_FIELD_NUMBER;
      hash = (53 * hash) + getWorkload().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parsing entry points; all delegate to PARSER (declared later in this class)
  // or to the GeneratedMessageV3 IO helpers.
  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builders start from the default instance to avoid redundant allocation.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; anything else is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for Updating a workload.
* </pre>
*
* Protobuf type {@code google.cloud.assuredworkloads.v1.UpdateWorkloadRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1.UpdateWorkloadRequest)
com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequestOrBuilder {
    // Same descriptor as the enclosing message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
          .internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflection support.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
          .internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.class,
              com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.Builder.class);
    }
    // Construct using com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // When the runtime requires eager field builders (nested-builder mode), create the
      // sub-message builders up front so change notifications propagate correctly.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getWorkloadFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits and both message fields, disposing any live sub-builders.
      bitField0_ = 0;
      workload_ = null;
      if (workloadBuilder_ != null) {
        workloadBuilder_.dispose();
        workloadBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
          .internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest getDefaultInstanceForType() {
      return com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.getDefaultInstance();
    }
@java.lang.Override
public com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest build() {
com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest buildPartial() {
      com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest result =
          new com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies the set fields from this builder into the result, translating the builder's
    // presence bits into the message's presence bits.
    private void buildPartial0(com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // Prefer the live sub-builder's value when one exists.
        result.workload_ = workloadBuilder_ == null ? workload_ : workloadBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflection-based mutators; overridden only to narrow the return type to Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type messages; anything else goes through reflective merging.
      if (other instanceof com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest) {
        return mergeFrom((com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest.getDefaultInstance())
        return this;
      if (other.hasWorkload()) {
        mergeWorkload(other.getWorkload());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required proto2-style fields, so the builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Standard wire-format loop: dispatch on each field tag until tag 0 (end of input)
        // or an end-group tag; unknown fields are preserved via parseUnknownField.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (workload), wire type 2 (length-delimited message).
                input.readMessage(getWorkloadFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (update_mask), wire type 2 (length-delimited message).
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure, since fields may have been partially merged.
        onChanged();
      } // finally
      return this;
    }
    // Builder-local presence bits: bit 0 tracks `workload`, bit 1 tracks `update_mask`.
    private int bitField0_;

    private com.google.cloud.assuredworkloads.v1.Workload workload_;
    // Lazily created nested builder for `workload`; when non-null it owns the field value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.assuredworkloads.v1.Workload,
            com.google.cloud.assuredworkloads.v1.Workload.Builder,
            com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder>
        workloadBuilder_;
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workload field is set.
*/
public boolean hasWorkload() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workload.
*/
public com.google.cloud.assuredworkloads.v1.Workload getWorkload() {
if (workloadBuilder_ == null) {
return workload_ == null
? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
: workload_;
} else {
return workloadBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWorkload(com.google.cloud.assuredworkloads.v1.Workload value) {
  // Route the assignment through the nested builder when one exists;
  // otherwise store the message directly (rejecting null explicitly).
  if (workloadBuilder_ != null) {
    workloadBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    workload_ = value;
  }
  bitField0_ |= 0x00000001;  // mark the field as set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWorkload(
    com.google.cloud.assuredworkloads.v1.Workload.Builder builderForValue) {
  // Materialize the builder once, then store it on whichever side owns the field.
  com.google.cloud.assuredworkloads.v1.Workload built = builderForValue.build();
  if (workloadBuilder_ == null) {
    workload_ = built;
  } else {
    workloadBuilder_.setMessage(built);
  }
  bitField0_ |= 0x00000001;  // mark the field as set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeWorkload(com.google.cloud.assuredworkloads.v1.Workload value) {
  if (workloadBuilder_ == null) {
    // Merge field-by-field only when a non-default message is already set;
    // otherwise adopt the incoming message wholesale.
    if (((bitField0_ & 0x00000001) != 0)
        && workload_ != null
        && workload_ != com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()) {
      getWorkloadBuilder().mergeFrom(value);
    } else {
      workload_ = value;
    }
  } else {
    workloadBuilder_.mergeFrom(value);
  }
  // Generated invariant: the presence bit is refreshed only while the plain
  // field (not the nested builder) holds the message.
  if (workload_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearWorkload() {
  // Drop the presence bit, then release both the message and any nested builder.
  bitField0_ &= ~0x00000001;
  workload_ = null;
  if (workloadBuilder_ != null) {
    workloadBuilder_.dispose();
    workloadBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.assuredworkloads.v1.Workload.Builder getWorkloadBuilder() {
  // Handing out the mutable builder implies the field will be populated,
  // so the presence bit is set eagerly.
  bitField0_ |= 0x00000001;
  onChanged();
  return getWorkloadFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder getWorkloadOrBuilder() {
  // Prefer the read-only view of the plain field when no nested builder exists.
  if (workloadBuilder_ == null) {
    return workload_ == null
        ? com.google.cloud.assuredworkloads.v1.Workload.getDefaultInstance()
        : workload_;
  }
  return workloadBuilder_.getMessageOrBuilder();
}
/**
*
*
* <pre>
* Required. The workload to update.
* The workload's `name` field is used to identify the workload to be updated.
* Format:
* organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
* </pre>
*
* <code>
* .google.cloud.assuredworkloads.v1.Workload workload = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.assuredworkloads.v1.Workload,
        com.google.cloud.assuredworkloads.v1.Workload.Builder,
        com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder>
    getWorkloadFieldBuilder() {
  // Lazily create the nested builder, seeding it with the current field value.
  if (workloadBuilder_ == null) {
    workloadBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.assuredworkloads.v1.Workload,
            com.google.cloud.assuredworkloads.v1.Workload.Builder,
            com.google.cloud.assuredworkloads.v1.WorkloadOrBuilder>(
            getWorkload(), getParentForChildren(), isClean());
    // Ownership of the field transfers to the builder from here on.
    workload_ = null;
  }
  return workloadBuilder_;
}
// Plain update_mask value, used until a nested builder is requested; once
// updateMaskBuilder_ is created it owns the field and updateMask_ is nulled.
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
  // Presence of update_mask is tracked in bit 1 of bitField0_.
  return (bitField0_ & 0x00000002) == 0x00000002;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
  // When a nested builder exists it owns the field; otherwise use the plain
  // field, substituting the default instance for an unset (null) value.
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessage();
  }
  return updateMask_ != null
      ? updateMask_
      : com.google.protobuf.FieldMask.getDefaultInstance();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  // Route the assignment through the nested builder when one exists;
  // otherwise store the message directly (rejecting null explicitly).
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  }
  bitField0_ |= 0x00000002;  // mark the field as set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  // Materialize the builder once, then store it on whichever side owns the field.
  com.google.protobuf.FieldMask built = builderForValue.build();
  if (updateMaskBuilder_ == null) {
    updateMask_ = built;
  } else {
    updateMaskBuilder_.setMessage(built);
  }
  bitField0_ |= 0x00000002;  // mark the field as set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Merge only when a non-default mask is already set; otherwise adopt
    // the incoming value wholesale.
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  // Generated invariant: the presence bit is refreshed only while the plain
  // field (not the nested builder) holds the message.
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
  // Drop the presence bit, then release both the message and any nested builder.
  bitField0_ &= ~0x00000002;
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  // Handing out the mutable builder implies the field will be populated,
  // so the presence bit is set eagerly.
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  // Prefer the read-only view of the plain field when no nested builder exists.
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
  return updateMaskBuilder_.getMessageOrBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  // Lazily create the nested builder, seeding it with the current field value.
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    // Ownership of the field transfers to the builder from here on.
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Generated delegation: unknown-field handling lives in the protobuf runtime.
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1.UpdateWorkloadRequest)
}

// @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1.UpdateWorkloadRequest)
// Shared immutable default instance, created once at class-load time.
private static final com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest();
}

public static com.google.cloud.assuredworkloads.v1.UpdateWorkloadRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that builds the message via the builder; on any failure the
// partially-built message is attached to the thrown exception so callers
// can inspect what was parsed so far.
private static final com.google.protobuf.Parser<UpdateWorkloadRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateWorkloadRequest>() {
      @java.lang.Override
      public UpdateWorkloadRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors in the protobuf exception type expected by callers.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateWorkloadRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateWorkloadRequest> getParserForType() {
  return PARSER;
}
}
|
apache/derby | 34,840 | java/org.apache.derby.engine/org/apache/derby/impl/services/locks/ConcurrentLockSet.java | /*
Derby - Class org.apache.derby.impl.services.locks.ConcurrentLockSet
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.services.locks;
import org.apache.derby.iapi.services.locks.CompatibilitySpace;
import org.apache.derby.iapi.services.locks.Latch;
import org.apache.derby.iapi.services.locks.Lockable;
import org.apache.derby.iapi.services.locks.C_LockFactory;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.services.diag.DiagnosticUtil;
import org.apache.derby.shared.common.reference.Property;
import org.apache.derby.shared.common.reference.SQLState;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.ConcurrentHashMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Enumeration;
import java.util.Map;
/**
A ConcurrentLockSet is a complete lock table which maps
<code>Lockable</code>s to <code>LockControl</code> objects.
<P>
A LockControl contains information about the locks held on a Lockable.
<BR>
MT - Mutable : All public methods of this class, except addWaiters, are
thread safe. addWaiters can only be called from the thread which performs
deadlock detection. Only one thread can perform deadlock detection at a
time.
<BR>
The class creates ActiveLock and LockControl objects.
LockControl objects are never passed out of this class, All the methods of
LockControl are called while holding a ReentrantLock associated with the
Lockable controlled by the LockControl, thus providing the
single threading that LockControl required.
Methods of Lockables are only called by this class or LockControl, and
always while holding the corresponding ReentrantLock, thus providing the
single threading that Lockable requires.
@see LockControl
*/
final class ConcurrentLockSet implements LockTable {
/*
** Fields
*/

/** Factory that owns this lock table; also supplies diagnostic helpers. */
private final AbstractPool factory;

/** Hash table which maps <code>Lockable</code> objects to
 * <code>Lock</code>s. */
private final ConcurrentHashMap<Lockable, Entry> locks;

/**
 * List containing all entries seen by the last call to
 * <code>addWaiters()</code>. Makes it possible for the deadlock detection
 * thread to lock all the entries it has visited until it has
 * finished. This prevents false deadlocks from being reported (because all
 * observed waiters must still be waiting when the deadlock detection has
 * completed).
 */
private ArrayList<Entry> seenByDeadlockDetection;

/**
Timeout for deadlocks, in ms.
<BR>
MT - immutable
*/
private int deadlockTimeout = Property.DEADLOCK_TIMEOUT_DEFAULT * 1000;

/** Timeout for lock waits, in ms. */
private int waitTimeout = Property.WAIT_TIMEOUT_DEFAULT * 1000;

//EXCLUDE-START-lockdiag-
// This variable is set and read without synchronization.
// Only one thread should be setting it at one time.
private boolean deadlockTrace;
//EXCLUDE-END-lockdiag-

// The number of waiters for locks
private final AtomicInteger blockCount;
/*
** Constructor
*/

/**
 * Create an empty lock table owned by the given factory.
 */
ConcurrentLockSet(AbstractPool factory) {
    this.factory = factory;
    this.locks = new ConcurrentHashMap<Lockable, Entry>();
    this.blockCount = new AtomicInteger();
}
/**
 * Class representing an entry in the lock table: a {@code Control} paired
 * with the mutex that single-threads all access to it.
 */
private static final class Entry {
    /** The lock control; {@code null} when the entry is empty/removed. */
    Control control;
    /**
     * Mutex used to ensure single-threaded access to the LockControls. To
     * avoid Java deadlocks, no thread should ever hold the mutex of more
     * than one entry. Excepted from this requirement is a thread which
     * performs deadlock detection. During deadlock detection, a thread
     * might hold several mutexes, but it is not allowed to hold any mutex
     * when entering the deadlock detection. Only one thread is allowed to
     * perform deadlock detection at a time.
     */
    private final ReentrantLock mutex = new ReentrantLock();
    /**
     * Condition variable which prevents calls to <code>lock()</code> from
     * locking the entry. If it is not <code>null</code>, only the thread
     * performing deadlock detection may lock the entry (by calling
     * <code>lockForDeadlockDetection()</code>).
     */
    private Condition deadlockDetection;
    /**
     * Lock the entry, ensuring exclusive access to the contained
     * <code>Control</code> object. The call will block until the entry can
     * be locked. If the entry is unlocked and
     * <code>deadlockDetection</code> is not <code>null</code>, the entry
     * belongs to a thread which waits for deadlock detection to be
     * initiated, and the call will block until that thread has finished
     * its deadlock detection.
     */
    void lock() {
        if (SanityManager.DEBUG) {
            // Re-locking would self-deadlock: the mutex is used non-reentrantly.
            SanityManager.ASSERT(!mutex.isHeldByCurrentThread());
        }
        mutex.lock();
        // Wait out any in-progress deadlock detection that owns this entry.
        while (deadlockDetection != null) {
            deadlockDetection.awaitUninterruptibly();
        }
    }
    /**
     * Unlock the entry, allowing other threads to lock and access the
     * contained <code>Control</code> object.
     */
    void unlock() {
        mutex.unlock();
    }
    /**
     * Lock the entry while performing deadlock detection. This method will
     * lock the entry even when <code>deadlockDetection</code> is not
     * <code>null</code>. If <code>deadlockDetection</code> is not
     * <code>null</code>, we know the entry and its <code>Control</code>
     * will not be accessed by others until we have finished the deadlock
     * detection, so it's OK for us to access it.
     */
    void lockForDeadlockDetection() {
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(!mutex.isHeldByCurrentThread());
        }
        // Intentionally skips the deadlockDetection wait performed by lock().
        mutex.lock();
    }
    /**
     * Notify that the lock request that is currently accessing the entry
     * will be entering deadlock detection. Unlock the entry to allow the
     * current thread or other threads to lock the entry for deadlock
     * detection, but set the condition variable to prevent regular locking
     * of the entry.
     */
    void enterDeadlockDetection() {
        deadlockDetection = mutex.newCondition();
        mutex.unlock();
    }
    /**
     * Notify that the deadlock detection triggered by the current thread
     * has finished. Re-lock the entry and notify any waiters that the
     * deadlock detection has completed.
     */
    void exitDeadlockDetection() {
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(!mutex.isHeldByCurrentThread());
        }
        mutex.lock();
        // Wake threads parked in lock() waiting for detection to finish.
        deadlockDetection.signalAll();
        deadlockDetection = null;
    }
}
/**
 * Fetch the entry for a <code>Lockable</code>, creating and inserting one
 * if necessary. The entry returned is locked by the calling thread and is
 * guaranteed to still be present in the table.
 *
 * @param ref the <code>Lockable</code> whose entry to return
 * @return the entry for the <code>Lockable</code>, locked for exclusive
 * access
 */
private Entry getEntry(Lockable ref) {
    Entry candidate = locks.get(ref);
    for (;;) {
        if (candidate == null) {
            // Nothing in the table: prepare a fresh, pre-locked entry.
            candidate = new Entry();
            candidate.lock();
        } else {
            candidate.lock();
            if (candidate.control != null) {
                // Live entry still in the table: hand it back locked.
                return candidate;
            }
            // The entry was emptied and removed after we fetched it;
            // attempt to re-insert it below.
        }
        Entry winner = locks.putIfAbsent(ref, candidate);
        if (winner == null) {
            // Our (re-)insertion succeeded.
            return candidate;
        }
        // Lost the race: release ours and retry with the entry that won.
        candidate.unlock();
        candidate = winner;
    }
}
/**
 * Check whether there is a deadlock. Make sure that only one thread enters
 * deadlock detection at a time.
 *
 * @param entry the entry in the lock table for the lock request that
 * triggered deadlock detection (must be locked by the caller)
 * @param waitingLock the waiting lock
 * @param wakeupReason the reason for waking up the waiter
 * @return an object describing the deadlock, or {@code null} if none found
 */
private Object[] checkDeadlock(Entry entry, ActiveLock waitingLock,
                               byte wakeupReason) {
    LockControl control = (LockControl) entry.control;
    // make sure that the entry is not blocking other threads performing
    // deadlock detection since we have to wait for them to finish
    entry.enterDeadlockDetection();
    // Class-level monitor serializes deadlock detection across the JVM.
    synchronized (Deadlock.class) {
        try {
            return Deadlock.look(factory, this, control, waitingLock,
                                 wakeupReason);
        } finally {
            // unlock all entries we visited during detection
            for (Entry e : seenByDeadlockDetection) {
                e.unlock();
            }
            seenByDeadlockDetection = null;
            // re-lock the entry for the caller and wake blocked lockers
            entry.exitDeadlockDetection();
        }
    }
}
/*
** Public Methods
*/
/**
 * Lock an object within a specific compatibility space.
 *
 * @param compatibilitySpace Compatibility space.
 * @param ref Lockable reference.
 * @param qualifier Qualifier.
 * @param timeout Timeout in milli-seconds
 *
 * @return Object that represents the lock, or {@code null} if the lock
 * could not be granted without waiting and no wait was requested.
 *
 * @exception StandardException Standard Derby policy.
 */
public Lock lockObject(CompatibilitySpace compatibilitySpace, Lockable ref,
                       Object qualifier, int timeout)
    throws StandardException
{
    if (SanityManager.DEBUG) {
        if (SanityManager.DEBUG_ON("memoryLeakTrace")) {
            if (locks.size() > 1000)
                System.out.println("memoryLeakTrace:LockSet: " +
                                   locks.size());
        }
    }

    LockControl control;
    Lock lockItem;
    String lockDebug = null;
    boolean blockedByParent = false;

    // Phase 1: under the entry mutex, either grant immediately or join the
    // wait queue. No thread blocking happens while the mutex is held.
    Entry entry = getEntry(ref);
    try {
        Control gc = entry.control;
        if (gc == null) {
            // object is not locked, can be granted
            Lock gl = new Lock(compatibilitySpace, ref, qualifier);
            gl.grant();
            entry.control = gl;
            return gl;
        }
        control = gc.getLockControl();
        if (control != gc) {
            // promote the lone Lock to a full LockControl
            entry.control = control;
        }
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(ref.equals(control.getLockable()));
            // ASSERT item is in the list
            SanityManager.ASSERT(
                locks.get(control.getLockable()).control == control);
        }
        lockItem = control.addLock(this, compatibilitySpace, qualifier);
        if (lockItem.getCount() != 0) {
            // granted without waiting
            return lockItem;
        }
        //
        // This logic supports the use-case of DERBY-6554.
        //
        blockedByParent =
            (timeout == 0) &&
            compatibilitySpace.getOwner().isNestedOwner() &&
            control.blockedByParent( lockItem );
        if (
            AbstractPool.noLockWait(timeout, compatibilitySpace) ||
            blockedByParent
        )
        {
            // remove all trace of lock
            control.giveUpWait(lockItem, this);
            if (SanityManager.DEBUG)
            {
                if (SanityManager.DEBUG_ON("DeadlockTrace"))
                {
                    SanityManager.showTrace(new Throwable());
                    // The following dumps the lock table as it
                    // exists at the time a timeout is about to
                    // cause a deadlock exception to be thrown.
                    lockDebug =
                        DiagnosticUtil.toDiagString(lockItem) +
                        "\nCould not grant lock with zero timeout, " +
                        "here's the table";
                    // We cannot hold a lock on an entry while calling
                    // toDebugString() since it will lock other entries in
                    // the lock table. Holding the lock could cause a
                    // deadlock.
                    entry.unlock();
                    try {
                        lockDebug += toDebugString();
                    } finally {
                        // Re-lock the entry so that the outer finally
                        // clause doesn't fail.
                        entry.lock();
                    }
                }
            }
            return null;
        }
    } finally {
        entry.unlock();
        if ( blockedByParent )
        {
            // A nested owner blocked by its parent would never be granted:
            // fail fast rather than wait (DERBY-6554).
            throw StandardException.newException
                ( SQLState.SELF_DEADLOCK );
        }
    }

    // Phase 2: compute how to wait — whether to run a deadlock check first,
    // and with what timeout.
    boolean deadlockWait = false;
    int actualTimeout;

    if (timeout == C_LockFactory.WAIT_FOREVER)
    {
        // always check for deadlocks as there should not be any
        deadlockWait = true;
        if ((actualTimeout = deadlockTimeout) == C_LockFactory.WAIT_FOREVER)
            actualTimeout = Property.DEADLOCK_TIMEOUT_DEFAULT * 1000;
    }
    else
    {
        if (timeout == C_LockFactory.TIMED_WAIT)
            timeout = actualTimeout = waitTimeout;
        else
            actualTimeout = timeout;

        // five posible cases
        // i) timeout -1, deadlock -1         ->
        //      just wait forever, no deadlock check
        // ii) timeout >= 0, deadlock -1      ->
        //      just wait for timeout, no deadlock check
        // iii) timeout -1, deadlock >= 0     ->
        //      wait for deadlock, then deadlock check,
        //      then infinite timeout
        // iv) timeout >=0, deadlock < timeout ->
        //      wait for deadlock, then deadlock check,
        //      then wait for (timeout - deadlock)
        // v) timeout >=0, deadlock >= timeout ->
        //      just wait for timeout, no deadlock check
        if (deadlockTimeout >= 0) {
            if (actualTimeout < 0) {
                // infinite wait but perform a deadlock check first
                deadlockWait = true;
                actualTimeout = deadlockTimeout;
            } else if (deadlockTimeout < actualTimeout) {
                // deadlock wait followed by a timeout wait
                deadlockWait = true;
                actualTimeout = deadlockTimeout;
                // leave timeout as the remaining time
                timeout -= deadlockTimeout;
            }
        }
    }

    // Phase 3: wait loop — blocks OUTSIDE the entry mutex, re-acquiring it
    // after each wakeup to examine/alter the queue.
    ActiveLock waitingLock = (ActiveLock) lockItem;
    lockItem = null;
    int earlyWakeupCount = 0;
    long startWaitTime = 0;

forever: for (;;) {
        byte wakeupReason = 0;
        ActiveLock nextWaitingLock = null;
        Object[] deadlockData = null;
        try {
            try {
                wakeupReason = waitingLock.waitForGrant(actualTimeout);
            } catch(StandardException e) {
                // DERBY-4711: If waitForGrant() fails, we need to
                // remove ourselves from the queue so that those
                // behind us in the queue don't get stuck waiting for
                // us.
                nextWaitingLock = control.getNextWaiter(waitingLock, true, this);
                throw e;
            }
            boolean willQuitWait;
            Enumeration timeoutLockTable = null;
            long currentTime = 0;
            entry.lock();
            try {
                if (control.isGrantable(
                        control.firstWaiter() == waitingLock,
                        compatibilitySpace,
                        qualifier)) {
                    // Yes, we are granted, put us on the granted queue.
                    control.grant(waitingLock);
                    // Remove from the waiting queue & get next waiter
                    nextWaitingLock =
                        control.getNextWaiter(waitingLock, true, this);
                    return waitingLock;
                }
                // try again later
                waitingLock.clearPotentiallyGranted();
                willQuitWait =
                    (wakeupReason != Constants.WAITING_LOCK_GRANT);
                if (((wakeupReason == Constants.WAITING_LOCK_IN_WAIT) &&
                        deadlockWait) ||
                    (wakeupReason == Constants.WAITING_LOCK_DEADLOCK))
                {
                    // check for a deadlock, even if we were woken up
                    // because we were selected as a victim we still
                    // check because the situation may have changed.
                    deadlockData =
                        checkDeadlock(entry, waitingLock, wakeupReason);
                    if (deadlockData == null) {
                        // we don't have a deadlock; resume normal waiting
                        deadlockWait = false;
                        actualTimeout = timeout;
                        startWaitTime = 0;
                        willQuitWait = false;
                    } else {
                        willQuitWait = true;
                    }
                }
                nextWaitingLock =
                    control.getNextWaiter(
                        waitingLock, willQuitWait, this);
                // If we were not woken by another then we have
                // timed out. Either deadlock out or timeout
                if (SanityManager.DEBUG &&
                        SanityManager.DEBUG_ON("DeadlockTrace") &&
                        willQuitWait) {
                    // Generate the first part of the debug message
                    // while holding the lock on entry, so that we have
                    // exclusive access to waitingLock. Wait until the
                    // entry has been unlocked before appending the
                    // contents of the lock table (to avoid deadlocks).
                    lockDebug =
                        DiagnosticUtil.toDiagString(waitingLock) +
                        "\nGot deadlock/timeout, here's the table";
                }
            } finally {
                entry.unlock();
            }
            // need to do this outside of the synchronized block as the
            // message text building (timeouts and deadlocks) may
            // involve getting locks to look up table names from
            // identifiers.
            if (willQuitWait)
            {
                if (deadlockTrace && (deadlockData == null)) {
                    // if ending lock request due to lock timeout
                    // want a copy of the LockTable and the time,
                    // in case of deadlock deadlockData has the
                    // info we need.
                    currentTime = System.currentTimeMillis();
                    timeoutLockTable =
                        factory.makeVirtualLockTable();
                }
                if (SanityManager.DEBUG)
                {
                    if (SanityManager.DEBUG_ON("DeadlockTrace")) {
                        SanityManager.showTrace(new Throwable());
                        // The following dumps the lock table as it
                        // exists at the time a timeout is about to
                        // cause a deadlock exception to be thrown.
                        lockDebug += toDebugString();
                    }
                    if (lockDebug != null)
                    {
                        String type =
                            ((deadlockData != null) ?
                                 "deadlock:" : "timeout:");
                        SanityManager.DEBUG_PRINT(
                            type,
                            "wait on lockitem caused " + type +
                            lockDebug);
                    }
                }
                if (deadlockData == null)
                {
                    // ending wait because of lock timeout or interrupt
                    if (wakeupReason ==
                            Constants.WAITING_LOCK_INTERRUPTED) {
                        throw StandardException.
                            newException(SQLState.CONN_INTERRUPT);
                    } else if (deadlockTrace)
                    {
                        // Turn ON derby.locks.deadlockTrace to build
                        // the lockTable.
                        throw Timeout.buildException(
                            waitingLock, timeoutLockTable, currentTime);
                    }
                    else
                    {
                        StandardException se =
                            StandardException.newException(
                                SQLState.LOCK_TIMEOUT);
                        throw se;
                    }
                }
                else
                {
                    // ending wait because of lock deadlock.
                    throw Deadlock.buildException(
                        factory, deadlockData);
                }
            }
        } finally {
            // Pass the baton: wake the next waiter whether we were granted,
            // gave up, or failed.
            if (nextWaitingLock != null) {
                nextWaitingLock.wakeUp(Constants.WAITING_LOCK_GRANT);
                nextWaitingLock = null;
            }
        }
        // Guard against spurious/early wakeups eating the timeout budget:
        // after several early wakeups, start charging elapsed time against
        // the remaining timeout.
        if (actualTimeout != C_LockFactory.WAIT_FOREVER) {
            if (wakeupReason != Constants.WAITING_LOCK_IN_WAIT)
                earlyWakeupCount++;
            if (earlyWakeupCount > 5) {
                long now = System.currentTimeMillis();
                if (startWaitTime != 0) {
                    long sleepTime = now - startWaitTime;
                    actualTimeout -= sleepTime;
                }
                startWaitTime = now;
            }
        }
    } // for(;;)
}
/**
 Unlock an object, previously locked by lockObject().

 If unlockCount is not zero then the lock will be unlocked
 that many times, otherwise the unlock count is taken from
 item.
*/
public void unlock(Latch item, int unlockCount) {
    // assume the Entry is there: the item is currently held, so its
    // entry cannot have been removed from the table
    Entry entry = locks.get(item.getLockable());
    entry.lock();
    try {
        unlock(entry, item, unlockCount);
    } finally {
        entry.unlock();
    }
}
/**
 * Unlock an object, previously locked by lockObject().
 *
 * @param entry the entry in which the lock is contained (the current
 * thread must have locked the entry)
 * @param item the item to unlock
 * @param unlockCount the number of times to unlock the item (if zero, take
 * the unlock count from item)
 */
private void unlock(Entry entry, Latch item, int unlockCount) {
    if (SanityManager.DEBUG) {
        SanityManager.ASSERT(entry.mutex.isHeldByCurrentThread());
        if (SanityManager.DEBUG_ON(Constants.LOCK_TRACE)) {
            /*
            ** I don't like checking the trace flag twice, but SanityManager
            ** doesn't provide a way to get to the debug trace stream
            ** directly.
            */
            SanityManager.DEBUG(
                Constants.LOCK_TRACE,
                "Release lock: " + DiagnosticUtil.toDiagString(item));
        }
    }
    boolean tryGrant = false;
    ActiveLock nextGrant = null;
    Control control = entry.control;
    if (SanityManager.DEBUG) {
        // only valid Lock's expected
        if (item.getLockable() == null)
        {
            SanityManager.THROWASSERT(
                "item.getLockable() = null." +
                "unlockCount " + unlockCount +
                "item = " + DiagnosticUtil.toDiagString(item));
        }
        // only valid Lock's expected
        if (control == null)
        {
            SanityManager.THROWASSERT(
                "control = null." +
                "unlockCount " + unlockCount +
                "item = " + DiagnosticUtil.toDiagString(item));
        }
        SanityManager.ASSERT(
            locks.get(control.getLockable()).control == control);
        if ((unlockCount != 0) && (unlockCount > item.getCount()))
            SanityManager.THROWASSERT("unlockCount " + unlockCount +
                " larger than actual lock count " + item.getCount() + " item " + item);
    }
    // release; control reports whether a waiter may now be grantable
    tryGrant = control.unlock(item, unlockCount);
    item = null;
    boolean mayBeEmpty = true;
    if (tryGrant) {
        // pick the first waiter and mark it potentially granted so that
        // it re-checks grantability when woken
        nextGrant = control.firstWaiter();
        if (nextGrant != null) {
            mayBeEmpty = false;
            if (!nextGrant.setPotentiallyGranted())
                nextGrant = null;
        }
    }
    if (mayBeEmpty) {
        if (control.isEmpty()) {
            // no-one granted, no-one waiting, remove lock control
            locks.remove(control.getLockable());
            entry.control = null;
        }
        return;
    }
    // wake the candidate waiter (outside the empty-control path)
    if (tryGrant && (nextGrant != null)) {
        nextGrant.wakeUp(Constants.WAITING_LOCK_GRANT);
    }
}
/**
 * Unlock an object once if it is present in the specified group. Also
 * remove the object from the group.
 *
 * @param space the compatibility space
 * @param ref a reference to the locked object
 * @param qualifier qualifier of the lock
 * @param group a map representing the locks in a group
 * @return the corresponding lock in the group map, or <code>null</code> if
 * the object was not unlocked
 */
public Lock unlockReference(CompatibilitySpace space, Lockable ref,
                            Object qualifier, Map group) {
    Entry entry = locks.get(ref);
    if (entry == null) {
        // nothing is locked on this reference
        return null;
    }
    entry.lock();
    try {
        Control control = entry.control;
        if (control == null) {
            // entry was emptied before we locked it
            return null;
        }
        Lock heldLock = control.getLock(space, qualifier);
        if (heldLock == null) {
            return null;
        }
        // remove it from the group; only unlock if it was actually there
        Lock groupLock = (Lock) group.remove(heldLock);
        if (groupLock == null) {
            return null;
        }
        unlock(entry, groupLock, 1);
        return groupLock;
    } finally {
        entry.unlock();
    }
}
/**
 * {@inheritDoc}
 *
 * Obtain and immediately release a lock: returns {@code true} if the lock
 * was (or could have been) granted, {@code false} if it could not be
 * granted and no wait was requested.
 */
public boolean zeroDurationLockObject(
    CompatibilitySpace space, Lockable ref, Object qualifier, int timeout)
    throws StandardException {
    if (SanityManager.DEBUG) {
        if (SanityManager.DEBUG_ON(Constants.LOCK_TRACE)) {
            D_LockControl.debugLock(
                "Zero Duration Lock Request before Grant: ",
                space, null, ref, qualifier, timeout);
            if (SanityManager.DEBUG_ON(Constants.LOCK_STACK_TRACE)) {
                // The following will print the stack trace of the lock
                // request to the log.
                Throwable t = new Throwable();
                java.io.PrintWriter istream =
                    SanityManager.GET_DEBUG_STREAM();
                istream.println("Stack trace of lock request:");
                t.printStackTrace(istream);
            }
        }
    }
    // Very fast zeroDurationLockObject() for unlocked objects.
    // If no entry exists in the lock manager for this reference
    // then it must be unlocked.
    // If the object is locked then we perform a grantable
    // check, skipping over any waiters.
    // If the caller wants to wait and the lock cannot
    // be granted then we do the slow join the queue and
    // release the lock method.
    Entry entry = locks.get(ref);
    if (entry == null) {
        return true;
    }
    entry.lock();
    try {
        Control control = entry.control;
        if (control == null) {
            // entry emptied before we locked it: object is unlocked
            return true;
        }
        // If we are grantable, ignoring waiting locks then
        // we can also grant this request now, as skipping
        // over the waiters won't block them as we release
        // the lock rightway.
        if (control.isGrantable(true, space, qualifier)) {
            return true;
        }
        // can't be granted and are not willing to wait.
        if (AbstractPool.noLockWait(timeout, space)) {
            return false;
        }
    } finally {
        entry.unlock();
    }
    // Slow path: actually join the queue and wait for the lock ...
    Lock lock = lockObject(space, ref, qualifier, timeout);
    if (SanityManager.DEBUG) {
        if (SanityManager.DEBUG_ON(Constants.LOCK_TRACE)) {
            D_LockControl.debugLock(
                "Zero Lock Request Granted: ",
                space, null, ref, qualifier, timeout);
        }
    }
    // and simply unlock it once
    unlock(lock, 1);
    return true;
}
/**
 * Set the deadlock timeout.
 *
 * @param timeout deadlock timeout in milliseconds
 */
public void setDeadlockTimeout(int timeout) {
    // NOTE(review): written without synchronization (like setDeadlockTrace
    // below) — presumably a stale read by a concurrent waiter is acceptable;
    // confirm against the field's declaration.
    deadlockTimeout = timeout;
}

/**
 * Set the wait timeout.
 *
 * @param timeout wait timeout in milliseconds
 */
public void setWaitTimeout(int timeout) {
    waitTimeout = timeout;
}

/**
 * Get the wait timeout in milliseconds.
 */
public int getWaitTimeout() { return waitTimeout; }
/*
** Non public methods
*/

//EXCLUDE-START-lockdiag-

/**
 * Enable or disable deadlock tracing.
 *
 * @param val <code>true</code> to enable deadlock tracing
 */
public void setDeadlockTrace(boolean val)
{
    // set this without synchronization: a stale value only affects
    // diagnostic output, not correctness of locking
    deadlockTrace = val;
}
//EXCLUDE-END-lockdiag-
/**
 * Build a diagnostic string describing every lock control in the table,
 * one line per entry.
 *
 * <p>Fixes two issues in the previous version: the entry index
 * <code>i</code> was never incremented, so every line was labelled
 * <code>lock[0]</code>; and the string was built with repeated
 * concatenation (O(n^2)) instead of a {@link StringBuilder}.
 *
 * @return the diagnostic string, or <code>null</code> when not running
 *         in a SanityManager.DEBUG build
 */
private String toDebugString()
{
    if (SanityManager.DEBUG)
    {
        StringBuilder str = new StringBuilder();
        int i = 0;
        for (Entry entry : locks.values())
        {
            entry.lock();
            try {
                str.append("\n lock[").append(i).append("]: ")
                   .append(DiagnosticUtil.toDiagString(entry.control));
            } finally {
                entry.unlock();
            }
            // Advance the label index for the next entry (previously missing).
            i++;
        }
        return str.toString();
    }
    else
    {
        return null;
    }
}
/**
 * Add all waiters in this lock table to a <code>Map</code> object.
 * This method can only be called by the thread that is currently
 * performing deadlock detection. All entries that are visited in the lock
 * table will be locked when this method returns. The entries that have
 * been seen and locked will be unlocked after the deadlock detection has
 * finished.
 *
 * @param waiters the map that the waiters are added to
 */
public void addWaiters(Map<Object,Object> waiters) {
    // Remember every entry visited so that the deadlock detector can
    // unlock them all after detection has finished.
    seenByDeadlockDetection = new ArrayList<Entry>(locks.size());
    for (Entry entry : locks.values()) {
        // Record the entry before locking it, so it is included in the
        // later bulk unlock in any case.
        seenByDeadlockDetection.add(entry);
        entry.lockForDeadlockDetection();
        if (entry.control != null) {
            entry.control.addWaiters(waiters);
        }
    }
}
//EXCLUDE-START-lockdiag-
/**
 * Create a shallow copy of this lock table: a new map from each locked
 * object to a shallow clone of its lock control.
 */
public Map<Lockable, Control> shallowClone() {
    Map<Lockable, Control> copy = new HashMap<Lockable, Control>();
    for (Entry entry : locks.values()) {
        entry.lock();
        try {
            Control ctrl = entry.control;
            if (ctrl == null) {
                // Entry without a control contributes nothing to the clone.
                continue;
            }
            copy.put(ctrl.getLockable(), ctrl.shallowClone());
        } finally {
            // Runs for the continue path as well.
            entry.unlock();
        }
    }
    return copy;
}
//EXCLUDE-END-lockdiag-
/**
 * Increase blockCount by one.
 *
 * <p>Presumably called when a thread starts waiting for a lock — confirm
 * against the callers; the count is only consumed by
 * <code>anyoneBlocked()</code> below.
 */
public void oneMoreWaiter() {
    blockCount.incrementAndGet();
}

/**
 * Decrease blockCount by one.
 */
public void oneLessWaiter() {
    blockCount.decrementAndGet();
}
/**
 * Check whether anyone is blocked.
 *
 * @return <code>true</code> if someone is blocked, <code>false</code>
 * otherwise
 */
public boolean anyoneBlocked() {
    int waiters = blockCount.get();
    if (SanityManager.DEBUG) {
        SanityManager.ASSERT(
            waiters >= 0, "blockCount should not be negative");
    }
    // Deliberately != 0 rather than > 0: the non-negative invariant is only
    // asserted in debug builds.
    return waiters != 0;
}
}
|
google/ExoPlayer | 35,501 | library/transformer/src/main/java/com/google/android/exoplayer2/transformer/Transformer.java | /*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static java.lang.annotation.ElementType.TYPE_USE;
import android.content.Context;
import android.os.Looper;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.audio.AudioProcessor;
import com.google.android.exoplayer2.effect.DebugTraceUtil;
import com.google.android.exoplayer2.effect.DefaultVideoFrameProcessor;
import com.google.android.exoplayer2.effect.Presentation;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.DebugViewProvider;
import com.google.android.exoplayer2.util.Effect;
import com.google.android.exoplayer2.util.HandlerWrapper;
import com.google.android.exoplayer2.util.ListenerSet;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.util.VideoFrameProcessor;
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.InlineMe;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* A transformer to export media inputs.
*
* <p>The same Transformer instance can be used to export multiple inputs (sequentially, not
* concurrently).
*
* <p>Transformer instances must be accessed from a single application thread. For the vast majority
* of cases this should be the application's main thread. The thread on which a Transformer instance
* must be accessed can be explicitly specified by passing a {@link Looper} when creating the
* transformer. If no Looper is specified, then the Looper of the thread that the {@link
* Transformer.Builder} is created on is used, or if that thread does not have a Looper, the Looper
* of the application's main thread is used. In all cases the Looper of the thread from which the
* transformer must be accessed can be queried using {@link #getApplicationLooper()}.
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@Deprecated
public final class Transformer {
static {
  // Register this module with the library's version/feature registry.
  ExoPlayerLibraryInfo.registerModule("goog.exo.transformer");
}
/** A builder for {@link Transformer} instances. */
public static final class Builder {

  // Mandatory field.
  private final Context context;

  // Optional fields. Defaults are assigned in Builder(Context); the copy
  // constructor Builder(Transformer) mirrors them back from a built instance.
  private TransformationRequest transformationRequest;
  private ImmutableList<AudioProcessor> audioProcessors;
  private ImmutableList<Effect> videoEffects;
  private boolean removeAudio;
  private boolean removeVideo;
  private boolean flattenForSlowMotion;
  private ListenerSet<Transformer.Listener> listeners;
  // Lazily defaulted in build() when left unset.
  private AssetLoader.@MonotonicNonNull Factory assetLoaderFactory;
  private VideoFrameProcessor.Factory videoFrameProcessorFactory;
  private Codec.EncoderFactory encoderFactory;
  private Muxer.Factory muxerFactory;
  private Looper looper;
  private DebugViewProvider debugViewProvider;
  private Clock clock;
/**
 * Creates a builder with default values.
 *
 * @param context The {@link Context}.
 */
public Builder(Context context) {
  this.context = context.getApplicationContext();
  transformationRequest = new TransformationRequest.Builder().build();
  audioProcessors = ImmutableList.of();
  videoEffects = ImmutableList.of();
  videoFrameProcessorFactory = new DefaultVideoFrameProcessor.Factory.Builder().build();
  encoderFactory = new DefaultEncoderFactory.Builder(this.context).build();
  muxerFactory = new DefaultMuxer.Factory();
  looper = Util.getCurrentOrMainLooper();
  debugViewProvider = DebugViewProvider.NONE;
  clock = Clock.DEFAULT;
  // Must be created after looper and clock are assigned: the listener set
  // dispatches callbacks on that looper using that clock.
  listeners = new ListenerSet<>(looper, clock, (listener, flags) -> {});
}
/**
 * Creates a builder with the values of the provided {@link Transformer}, so that {@code
 * transformer.buildUpon().build()} round-trips the transformer's full configuration.
 */
private Builder(Transformer transformer) {
  this.context = transformer.context;
  this.transformationRequest = transformer.transformationRequest;
  this.audioProcessors = transformer.audioProcessors;
  this.videoEffects = transformer.videoEffects;
  this.removeAudio = transformer.removeAudio;
  this.removeVideo = transformer.removeVideo;
  // Bug fix: this copy was previously missing, so buildUpon() silently
  // dropped the flattenForSlowMotion flag configured on the source.
  this.flattenForSlowMotion = transformer.flattenForSlowMotion;
  this.listeners = transformer.listeners;
  this.assetLoaderFactory = transformer.assetLoaderFactory;
  this.videoFrameProcessorFactory = transformer.videoFrameProcessorFactory;
  this.encoderFactory = transformer.encoderFactory;
  this.muxerFactory = transformer.muxerFactory;
  this.looper = transformer.looper;
  this.debugViewProvider = transformer.debugViewProvider;
  this.clock = transformer.clock;
}
/**
 * Sets the {@link TransformationRequest} which configures the editing and transcoding options.
 *
 * <p>Actual applied values may differ, per device capabilities. {@link
 * Listener#onFallbackApplied(Composition, TransformationRequest, TransformationRequest)} will
 * be invoked with the actual applied values.
 *
 * @param transformationRequest The {@link TransformationRequest}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setTransformationRequest(TransformationRequest transformationRequest) {
  this.transformationRequest = transformationRequest;
  return this;
}

/**
 * @deprecated Set the {@linkplain AudioProcessor audio processors} in an {@link
 *     EditedMediaItem}, and pass it to {@link #start(EditedMediaItem, String)} instead.
 */
@CanIgnoreReturnValue
@Deprecated
public Builder setAudioProcessors(List<AudioProcessor> audioProcessors) {
  // Defensive immutable snapshot of the caller's list.
  this.audioProcessors = ImmutableList.copyOf(audioProcessors);
  return this;
}

/**
 * @deprecated Set the {@linkplain Effect video effects} in an {@link EditedMediaItem}, and pass
 *     it to {@link #start(EditedMediaItem, String)} instead.
 */
@CanIgnoreReturnValue
@Deprecated
public Builder setVideoEffects(List<Effect> effects) {
  // Defensive immutable snapshot of the caller's list.
  this.videoEffects = ImmutableList.copyOf(effects);
  return this;
}

/**
 * @deprecated Use {@link EditedMediaItem.Builder#setRemoveAudio(boolean)} to remove the audio
 *     from the {@link EditedMediaItem} passed to {@link #start(EditedMediaItem, String)}
 *     instead.
 */
@CanIgnoreReturnValue
@Deprecated
public Builder setRemoveAudio(boolean removeAudio) {
  this.removeAudio = removeAudio;
  return this;
}

/**
 * @deprecated Use {@link EditedMediaItem.Builder#setRemoveVideo(boolean)} to remove the video
 *     from the {@link EditedMediaItem} passed to {@link #start(EditedMediaItem, String)}
 *     instead.
 */
@CanIgnoreReturnValue
@Deprecated
public Builder setRemoveVideo(boolean removeVideo) {
  this.removeVideo = removeVideo;
  return this;
}

/**
 * @deprecated Use {@link EditedMediaItem.Builder#setFlattenForSlowMotion(boolean)} to flatten
 *     the {@link EditedMediaItem} passed to {@link #start(EditedMediaItem, String)} instead.
 */
@CanIgnoreReturnValue
@Deprecated
public Builder setFlattenForSlowMotion(boolean flattenForSlowMotion) {
  this.flattenForSlowMotion = flattenForSlowMotion;
  return this;
}
/**
 * @deprecated Use {@link #addListener(Listener)}, {@link #removeListener(Listener)} or {@link
 *     #removeAllListeners()} instead.
 */
@CanIgnoreReturnValue
@Deprecated
public Builder setListener(Transformer.Listener listener) {
  // Legacy single-listener semantics: replace all existing listeners.
  this.listeners.clear();
  this.listeners.add(listener);
  return this;
}

/**
 * Adds a {@link Transformer.Listener} to listen to the export events.
 *
 * <p>This is equivalent to {@link Transformer#addListener(Listener)}.
 *
 * @param listener A {@link Transformer.Listener}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder addListener(Transformer.Listener listener) {
  this.listeners.add(listener);
  return this;
}

/**
 * Removes a {@link Transformer.Listener}.
 *
 * <p>This is equivalent to {@link Transformer#removeListener(Listener)}.
 *
 * @param listener A {@link Transformer.Listener}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder removeListener(Transformer.Listener listener) {
  this.listeners.remove(listener);
  return this;
}

/**
 * Removes all {@linkplain Transformer.Listener listeners}.
 *
 * <p>This is equivalent to {@link Transformer#removeAllListeners()}.
 *
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder removeAllListeners() {
  this.listeners.clear();
  return this;
}
/**
 * Sets the {@link AssetLoader.Factory} to be used to retrieve the samples to export.
 *
 * <p>The default value is a {@link DefaultAssetLoaderFactory} built with a {@link
 * DefaultMediaSourceFactory} and a {@link DefaultDecoderFactory}.
 *
 * @param assetLoaderFactory An {@link AssetLoader.Factory}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setAssetLoaderFactory(AssetLoader.Factory assetLoaderFactory) {
  this.assetLoaderFactory = assetLoaderFactory;
  return this;
}

/**
 * Sets the factory to be used to create {@link VideoFrameProcessor} instances.
 *
 * <p>The default value is a {@link DefaultVideoFrameProcessor.Factory} built with default
 * values.
 *
 * @param videoFrameProcessorFactory A {@link VideoFrameProcessor.Factory}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setVideoFrameProcessorFactory(
    VideoFrameProcessor.Factory videoFrameProcessorFactory) {
  this.videoFrameProcessorFactory = videoFrameProcessorFactory;
  return this;
}

/**
 * Sets the {@link Codec.EncoderFactory} that will be used by the transformer.
 *
 * <p>The default value is a {@link DefaultEncoderFactory} instance.
 *
 * @param encoderFactory The {@link Codec.EncoderFactory} instance.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setEncoderFactory(Codec.EncoderFactory encoderFactory) {
  this.encoderFactory = encoderFactory;
  return this;
}

/**
 * Sets the factory for muxers that write the media container.
 *
 * <p>The default value is a {@link DefaultMuxer.Factory}.
 *
 * @param muxerFactory A {@link Muxer.Factory}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setMuxerFactory(Muxer.Factory muxerFactory) {
  this.muxerFactory = muxerFactory;
  return this;
}

/**
 * Sets the {@link Looper} that must be used for all calls to the transformer and that is used
 * to call listeners on.
 *
 * <p>The default value is the Looper of the thread that this builder was created on, or if that
 * thread does not have a Looper, the Looper of the application's main thread.
 *
 * @param looper A {@link Looper}.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setLooper(Looper looper) {
  this.looper = looper;
  // Recreate the listener set so callbacks dispatch on the new looper.
  this.listeners = listeners.copy(looper, (listener, flags) -> {});
  return this;
}

/**
 * Sets a provider for views to show diagnostic information (if available) during export.
 *
 * <p>This is intended for debugging. The default value is {@link DebugViewProvider#NONE}, which
 * doesn't show any debug info.
 *
 * <p>Not all exports will result in debug views being populated.
 *
 * @param debugViewProvider Provider for debug views.
 * @return This builder.
 */
@CanIgnoreReturnValue
public Builder setDebugViewProvider(DebugViewProvider debugViewProvider) {
  this.debugViewProvider = debugViewProvider;
  return this;
}

/**
 * Sets the {@link Clock} that will be used by the transformer.
 *
 * <p>The default value is {@link Clock#DEFAULT}.
 *
 * @param clock The {@link Clock} instance.
 * @return This builder.
 */
@CanIgnoreReturnValue
@VisibleForTesting
/* package */ Builder setClock(Clock clock) {
  this.clock = clock;
  // Recreate the listener set so it uses the new clock for dispatching.
  this.listeners = listeners.copy(looper, clock, (listener, flags) -> {});
  return this;
}
/**
 * Builds a {@link Transformer} instance.
 *
 * @throws IllegalStateException If both audio and video have been removed (otherwise the output
 *     would not contain any samples).
 * @throws IllegalStateException If the muxer doesn't support the requested audio/video MIME
 *     type.
 */
public Transformer build() {
  // Validate requested output MIME types against the muxer up front, so
  // misconfiguration fails at build time rather than mid-export.
  if (transformationRequest.audioMimeType != null) {
    checkSampleMimeType(transformationRequest.audioMimeType);
  }
  if (transformationRequest.videoMimeType != null) {
    checkSampleMimeType(transformationRequest.videoMimeType);
  }
  // Lazily create the default asset loader factory if none was supplied.
  if (assetLoaderFactory == null) {
    assetLoaderFactory =
        new DefaultAssetLoaderFactory(
            context,
            new DefaultDecoderFactory(context),
            /* forceInterpretHdrAsSdr= */ transformationRequest.hdrMode
                == TransformationRequest.HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR,
            clock);
  }
  return new Transformer(
      context,
      transformationRequest,
      audioProcessors,
      videoEffects,
      removeAudio,
      removeVideo,
      flattenForSlowMotion,
      listeners,
      assetLoaderFactory,
      videoFrameProcessorFactory,
      encoderFactory,
      muxerFactory,
      looper,
      debugViewProvider,
      clock);
}

// Checks that the muxer can write sampleMimeType for its track type.
private void checkSampleMimeType(String sampleMimeType) {
  checkState(
      muxerFactory
          .getSupportedSampleMimeTypes(MimeTypes.getTrackType(sampleMimeType))
          .contains(sampleMimeType),
      "Unsupported sample MIME type " + sampleMimeType);
}
}
/**
 * A listener for the export events.
 *
 * <p>If the export is not cancelled, either {@link #onError} or {@link #onCompleted} will be
 * called once for each export.
 *
 * <p>The non-deprecated default methods forward to the deprecated single-{@link MediaItem}
 * callbacks using the first item of the first sequence, so implementations of the old interface
 * keep working.
 */
public interface Listener {

  /**
   * @deprecated Use {@link #onCompleted(Composition, ExportResult)} instead.
   */
  @Deprecated
  default void onTransformationCompleted(MediaItem inputMediaItem) {}

  /**
   * @deprecated Use {@link #onCompleted(Composition, ExportResult)} instead.
   */
  @Deprecated
  default void onTransformationCompleted(MediaItem inputMediaItem, TransformationResult result) {
    onTransformationCompleted(inputMediaItem);
  }

  /**
   * Called when the export is completed successfully.
   *
   * @param composition The {@link Composition} for which the export is completed.
   * @param exportResult The {@link ExportResult} of the export.
   */
  @SuppressWarnings("deprecation") // Calling deprecated listener method.
  default void onCompleted(Composition composition, ExportResult exportResult) {
    MediaItem mediaItem = composition.sequences.get(0).editedMediaItems.get(0).mediaItem;
    onTransformationCompleted(mediaItem, new TransformationResult.Builder(exportResult).build());
  }

  /**
   * @deprecated Use {@link #onError(Composition, ExportResult, ExportException)} instead.
   */
  @Deprecated
  default void onTransformationError(MediaItem inputMediaItem, Exception exception) {}

  /**
   * @deprecated Use {@link #onError(Composition, ExportResult, ExportException)} instead.
   */
  @Deprecated
  default void onTransformationError(
      MediaItem inputMediaItem, TransformationException exception) {
    onTransformationError(inputMediaItem, (Exception) exception);
  }

  /**
   * @deprecated Use {@link #onError(Composition, ExportResult, ExportException)} instead.
   */
  @Deprecated
  default void onTransformationError(
      MediaItem inputMediaItem, TransformationResult result, TransformationException exception) {
    onTransformationError(inputMediaItem, exception);
  }

  /**
   * Called if an exception occurs during the export.
   *
   * <p>The export output file (if any) is not deleted in this case.
   *
   * @param composition The {@link Composition} for which the exception occurs.
   * @param exportResult The {@link ExportResult} of the export.
   * @param exportException The {@link ExportException} describing the exception. This is the same
   *     instance as the {@linkplain ExportResult#exportException exception} in {@code result}.
   */
  @SuppressWarnings("deprecation") // Calling deprecated listener method.
  default void onError(
      Composition composition, ExportResult exportResult, ExportException exportException) {
    MediaItem mediaItem = composition.sequences.get(0).editedMediaItems.get(0).mediaItem;
    onTransformationError(
        mediaItem,
        new TransformationResult.Builder(exportResult).build(),
        new TransformationException(exportException));
  }

  /**
   * @deprecated Use {@link #onFallbackApplied(Composition, TransformationRequest,
   *     TransformationRequest)} instead.
   */
  @Deprecated
  default void onFallbackApplied(
      MediaItem inputMediaItem,
      TransformationRequest originalTransformationRequest,
      TransformationRequest fallbackTransformationRequest) {}

  /**
   * Called when falling back to an alternative {@link TransformationRequest} or changing the
   * video frames' resolution is necessary to comply with muxer or device constraints.
   *
   * @param composition The {@link Composition} for which the export is requested.
   * @param originalTransformationRequest The unsupported {@link TransformationRequest} used when
   *     building {@link Transformer}.
   * @param fallbackTransformationRequest The alternative {@link TransformationRequest}, with
   *     supported {@link TransformationRequest#audioMimeType}, {@link
   *     TransformationRequest#videoMimeType}, {@link TransformationRequest#outputHeight}, and
   *     {@link TransformationRequest#hdrMode} values set.
   */
  @SuppressWarnings("deprecation") // Calling deprecated listener method.
  default void onFallbackApplied(
      Composition composition,
      TransformationRequest originalTransformationRequest,
      TransformationRequest fallbackTransformationRequest) {
    MediaItem mediaItem = composition.sequences.get(0).editedMediaItems.get(0).mediaItem;
    onFallbackApplied(mediaItem, originalTransformationRequest, fallbackTransformationRequest);
  }
}
/**
 * Progress state. One of {@link #PROGRESS_STATE_NOT_STARTED}, {@link
 * #PROGRESS_STATE_WAITING_FOR_AVAILABILITY}, {@link #PROGRESS_STATE_AVAILABLE} or {@link
 * #PROGRESS_STATE_UNAVAILABLE}.
 */
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target(TYPE_USE)
@IntDef({
  PROGRESS_STATE_NOT_STARTED,
  PROGRESS_STATE_WAITING_FOR_AVAILABILITY,
  PROGRESS_STATE_AVAILABLE,
  PROGRESS_STATE_UNAVAILABLE
})
public @interface ProgressState {}

/** Indicates that the corresponding operation hasn't been started. */
public static final int PROGRESS_STATE_NOT_STARTED = 0;

/**
 * @deprecated Use {@link #PROGRESS_STATE_NOT_STARTED} instead.
 */
@Deprecated public static final int PROGRESS_STATE_NO_TRANSFORMATION = PROGRESS_STATE_NOT_STARTED;

/** Indicates that the progress is currently unavailable, but might become available. */
public static final int PROGRESS_STATE_WAITING_FOR_AVAILABILITY = 1;

/** Indicates that the progress is available. */
public static final int PROGRESS_STATE_AVAILABLE = 2;

/** Indicates that the progress is permanently unavailable. */
public static final int PROGRESS_STATE_UNAVAILABLE = 3;
// Immutable configuration, captured from the Builder at construction time.
private final Context context;
private final TransformationRequest transformationRequest;
private final ImmutableList<AudioProcessor> audioProcessors;
private final ImmutableList<Effect> videoEffects;
private final boolean removeAudio;
private final boolean removeVideo;
private final boolean flattenForSlowMotion;
private final ListenerSet<Transformer.Listener> listeners;
private final AssetLoader.Factory assetLoaderFactory;
private final VideoFrameProcessor.Factory videoFrameProcessorFactory;
private final Codec.EncoderFactory encoderFactory;
private final Muxer.Factory muxerFactory;
private final Looper looper;
private final DebugViewProvider debugViewProvider;
private final Clock clock;

// Non-null while an export is in progress; reset elsewhere when it ends.
@Nullable private TransformerInternal transformerInternal;
// Instances are created via Builder.build() only.
private Transformer(
    Context context,
    TransformationRequest transformationRequest,
    ImmutableList<AudioProcessor> audioProcessors,
    ImmutableList<Effect> videoEffects,
    boolean removeAudio,
    boolean removeVideo,
    boolean flattenForSlowMotion,
    ListenerSet<Listener> listeners,
    AssetLoader.Factory assetLoaderFactory,
    VideoFrameProcessor.Factory videoFrameProcessorFactory,
    Codec.EncoderFactory encoderFactory,
    Muxer.Factory muxerFactory,
    Looper looper,
    DebugViewProvider debugViewProvider,
    Clock clock) {
  // Removing both tracks would produce an output with no samples.
  checkState(!removeAudio || !removeVideo, "Audio and video cannot both be removed.");
  this.context = context;
  this.transformationRequest = transformationRequest;
  this.audioProcessors = audioProcessors;
  this.videoEffects = videoEffects;
  this.removeAudio = removeAudio;
  this.removeVideo = removeVideo;
  this.flattenForSlowMotion = flattenForSlowMotion;
  this.listeners = listeners;
  this.assetLoaderFactory = assetLoaderFactory;
  this.videoFrameProcessorFactory = videoFrameProcessorFactory;
  this.encoderFactory = encoderFactory;
  this.muxerFactory = muxerFactory;
  this.looper = looper;
  this.debugViewProvider = debugViewProvider;
  this.clock = clock;
}
/** Returns a {@link Transformer.Builder} initialized with the values of this instance. */
public Builder buildUpon() {
  return new Builder(this);
}

/**
 * @deprecated Use {@link #addListener(Listener)}, {@link #removeListener(Listener)} or {@link
 *     #removeAllListeners()} instead.
 */
@Deprecated
public void setListener(Transformer.Listener listener) {
  verifyApplicationThread();
  // Legacy single-listener semantics: replace all existing listeners.
  this.listeners.clear();
  this.listeners.add(listener);
}
/**
 * Adds a {@link Transformer.Listener} to listen to the export events.
 *
 * @param listener A {@link Transformer.Listener}.
 * @throws IllegalStateException If this method is called from the wrong thread.
 */
public void addListener(Transformer.Listener listener) {
  verifyApplicationThread();
  this.listeners.add(listener);
}

/**
 * Removes a {@link Transformer.Listener}.
 *
 * @param listener A {@link Transformer.Listener}.
 * @throws IllegalStateException If this method is called from the wrong thread.
 */
public void removeListener(Transformer.Listener listener) {
  verifyApplicationThread();
  this.listeners.remove(listener);
}

/**
 * Removes all {@linkplain Transformer.Listener listeners}.
 *
 * @throws IllegalStateException If this method is called from the wrong thread.
 */
public void removeAllListeners() {
  verifyApplicationThread();
  this.listeners.clear();
}
/**
 * Starts an asynchronous operation to export the given {@link Composition}.
 *
 * <p>This method is under implementation. Only {@linkplain Composition compositions} meeting all
 * of the following conditions are supported:
 *
 * <ul>
 *   <li>No overlapping track of the same type in the output: either a single {@linkplain
 *       EditedMediaItemSequence sequence}, or one audio-only sequence plus one video/image-only
 *       sequence.
 *   <li>No sequence mixing video and image input, or mixing HDR and SDR video input.
 *   <li>No gaps in a sequence's video or image samples: if a sequence contains such data, it
 *       contains it for the entire sequence.
 *   <li>All {@link EditedMediaItem} instances in a sequence share the same audio format and the
 *       same effects.
 *   <li>The {@linkplain Composition#effects composition effects} contain no {@linkplain
 *       Effects#audioProcessors audio effects}, and their video effects are either empty or
 *       exactly one {@link Presentation}.
 * </ul>
 *
 * <p>The export state is notified through the {@linkplain Builder#addListener(Listener)
 * listener}. Concurrent exports on the same Transformer object are not allowed.
 *
 * <p>If no custom {@link Transformer.Builder#setMuxerFactory(Muxer.Factory) Muxer.Factory} is
 * specified, the output is an MP4 file. The output contains at most one video track and one
 * audio track; other track types are ignored. For adaptive bitrate inputs, if no custom {@link
 * Transformer.Builder#setAssetLoaderFactory(AssetLoader.Factory) AssetLoader.Factory} is
 * specified, the highest bitrate video and audio streams are selected.
 *
 * <p>If exporting the video track entails transcoding, the output frames' dimensions will be
 * swapped if the output video's height is larger than the width, to improve compatibility among
 * different device encoders.
 *
 * @param composition The {@link Composition} to export.
 * @param path The path to the output file.
 * @throws IllegalStateException If this method is called from the wrong thread.
 * @throws IllegalStateException If an export is already in progress.
 */
public void start(Composition composition, String path) {
  // Composition-level audio effects are not supported yet.
  checkArgument(composition.effects.audioProcessors.isEmpty());
  // Composition-level video effects are limited to a single Presentation.
  ImmutableList<Effect> compositionVideoEffects = composition.effects.videoEffects;
  boolean videoEffectsSupported =
      compositionVideoEffects.isEmpty()
          || (compositionVideoEffects.size() == 1
              && compositionVideoEffects.get(0) instanceof Presentation);
  checkArgument(videoEffectsSupported);
  verifyApplicationThread();
  checkState(transformerInternal == null, "There is already an export in progress.");
  TransformerInternalListener internalListener = new TransformerInternalListener(composition);
  HandlerWrapper applicationHandler = clock.createHandler(looper, /* callback= */ null);
  FallbackListener fallbackListener =
      new FallbackListener(composition, listeners, applicationHandler, transformationRequest);
  DebugTraceUtil.reset();
  transformerInternal =
      new TransformerInternal(
          context,
          composition,
          path,
          transformationRequest,
          assetLoaderFactory,
          videoFrameProcessorFactory,
          encoderFactory,
          muxerFactory,
          internalListener,
          fallbackListener,
          applicationHandler,
          debugViewProvider,
          clock);
  transformerInternal.start();
}
/**
 * Starts an asynchronous operation to export the given {@link EditedMediaItem}.
 *
 * <p>The export state is notified through the {@linkplain Builder#addListener(Listener)
 * listener}.
 *
 * <p>Concurrent exports on the same Transformer object are not allowed.
 *
 * <p>If no custom {@link Transformer.Builder#setMuxerFactory(Muxer.Factory) Muxer.Factory} is
 * specified, the output is an MP4 file.
 *
 * <p>The output can contain at most one video track and one audio track. Other track types are
 * ignored. For adaptive bitrate inputs, if no custom {@link
 * Transformer.Builder#setAssetLoaderFactory(AssetLoader.Factory) AssetLoader.Factory} is
 * specified, the highest bitrate video and audio streams are selected.
 *
 * <p>If exporting the video track entails transcoding, the output frames' dimensions will be
 * swapped if the output video's height is larger than the width. This is to improve compatibility
 * among different device encoders.
 *
 * @param editedMediaItem The {@link EditedMediaItem} to export.
 * @param path The path to the output file.
 * @throws IllegalStateException If this method is called from the wrong thread.
 * @throws IllegalStateException If an export is already in progress.
 */
public void start(EditedMediaItem editedMediaItem, String path) {
  // Wrap the single item in a one-sequence composition and delegate.
  EditedMediaItemSequence sequence =
      new EditedMediaItemSequence(ImmutableList.of(editedMediaItem));
  start(new Composition.Builder(ImmutableList.of(sequence)).build(), path);
}
/**
 * Starts an asynchronous operation to export the given {@link MediaItem}.
 *
 * <p>The export state is notified through the {@linkplain Builder#addListener(Listener)
 * listener}. Concurrent exports on the same Transformer object are not allowed.
 *
 * <p>If no custom {@link Transformer.Builder#setMuxerFactory(Muxer.Factory) Muxer.Factory} is
 * specified, the output is an MP4 file. The output contains at most one video track and one
 * audio track; other track types are ignored. For adaptive bitrate inputs, if no custom {@link
 * Transformer.Builder#setAssetLoaderFactory(AssetLoader.Factory) AssetLoader.Factory} is
 * specified, the highest bitrate video and audio streams are selected.
 *
 * <p>If exporting the video track entails transcoding, the output frames' dimensions will be
 * swapped if the output video's height is larger than the width, to improve compatibility among
 * different device encoders.
 *
 * @param mediaItem The {@link MediaItem} to export.
 * @param path The path to the output file.
 * @throws IllegalArgumentException If the {@link MediaItem} is not supported.
 * @throws IllegalStateException If this method is called from the wrong thread.
 * @throws IllegalStateException If an export is already in progress.
 */
public void start(MediaItem mediaItem, String path) {
  boolean isClipped =
      !mediaItem.clippingConfiguration.equals(MediaItem.ClippingConfiguration.UNSET);
  if (isClipped && flattenForSlowMotion) {
    throw new IllegalArgumentException(
        "Clipping is not supported when slow motion flattening is requested");
  }
  // Apply the deprecated Builder-level options to a single-item export.
  start(
      new EditedMediaItem.Builder(mediaItem)
          .setRemoveAudio(removeAudio)
          .setRemoveVideo(removeVideo)
          .setFlattenForSlowMotion(flattenForSlowMotion)
          .setEffects(new Effects(audioProcessors, videoEffects))
          .build(),
      path);
}
  /**
   * Starts an asynchronous operation to export the given {@link MediaItem}.
   *
   * @deprecated Use {@link #start(MediaItem, String)} instead.
   */
  @Deprecated
  // @InlineMe lets Error Prone tooling rewrite call sites to the replacement automatically;
  // the replacement string must stay in sync with the delegation below.
  @InlineMe(replacement = "this.start(mediaItem, path)")
  public void startTransformation(MediaItem mediaItem, String path) {
    start(mediaItem, path);
  }
  /**
   * Returns the {@link Looper} associated with the application thread that's used to access the
   * transformer and on which transformer events are received.
   *
   * <p>This is the looper captured when the Transformer was built; all other public methods must
   * be called on this looper's thread (see {@code verifyApplicationThread()}).
   */
  public Looper getApplicationLooper() {
    return looper;
  }
/**
* Returns the current {@link ProgressState} and updates {@code progressHolder} with the current
* progress if it is {@link #PROGRESS_STATE_AVAILABLE available}.
*
* <p>After an export {@linkplain Listener#onCompleted(Composition, ExportResult) completes}, this
* method returns {@link #PROGRESS_STATE_NOT_STARTED}.
*
* @param progressHolder A {@link ProgressHolder}, updated to hold the percentage progress if
* {@link #PROGRESS_STATE_AVAILABLE available}.
* @return The {@link ProgressState}.
* @throws IllegalStateException If this method is called from the wrong thread.
*/
public @ProgressState int getProgress(ProgressHolder progressHolder) {
verifyApplicationThread();
return transformerInternal == null
? PROGRESS_STATE_NOT_STARTED
: transformerInternal.getProgress(progressHolder);
}
/**
* Cancels the export that is currently in progress, if any.
*
* <p>The export output file (if any) is not deleted.
*
* @throws IllegalStateException If this method is called from the wrong thread.
*/
public void cancel() {
verifyApplicationThread();
if (transformerInternal == null) {
return;
}
try {
transformerInternal.cancel();
} finally {
transformerInternal = null;
}
}
private void verifyApplicationThread() {
if (Looper.myLooper() != looper) {
throw new IllegalStateException("Transformer is accessed on the wrong thread.");
}
}
  /**
   * Receives terminal events from {@code TransformerInternal} and forwards them to the registered
   * {@link Listener}s, attaching the {@link Composition} that was exported.
   */
  private final class TransformerInternalListener implements TransformerInternal.Listener {
    // The composition this export was started with; passed back to listeners on completion/error.
    private final Composition composition;
    public TransformerInternalListener(Composition composition) {
      this.composition = composition;
    }
    @Override
    public void onCompleted(ExportResult exportResult) {
      // TODO(b/213341814): Add event flags for Transformer events.
      // Clear the active export BEFORE notifying listeners, so a listener may start a new
      // export from within its callback without tripping the "already in progress" check.
      transformerInternal = null;
      listeners.queueEvent(
          /* eventFlag= */ C.INDEX_UNSET,
          listener -> listener.onCompleted(composition, exportResult));
      listeners.flushEvents();
    }
    @Override
    public void onError(ExportResult exportResult, ExportException exportException) {
      // Same ordering as onCompleted: release the export slot before dispatching the error.
      transformerInternal = null;
      listeners.queueEvent(
          /* eventFlag= */ C.INDEX_UNSET,
          listener -> listener.onError(composition, exportResult, exportException));
      listeners.flushEvents();
    }
  }
}
|
googleapis/google-cloud-java | 35,325 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RegionSetLabelsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionSetLabelsRequest}
*/
public final class RegionSetLabelsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RegionSetLabelsRequest)
RegionSetLabelsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RegionSetLabelsRequest.newBuilder() to construct.
private RegionSetLabelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RegionSetLabelsRequest() {
labelFingerprint_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RegionSetLabelsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionSetLabelsRequest_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 500195327:
return internalGetLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionSetLabelsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RegionSetLabelsRequest.class,
com.google.cloud.compute.v1.RegionSetLabelsRequest.Builder.class);
}
private int bitField0_;
public static final int LABEL_FINGERPRINT_FIELD_NUMBER = 178124825;
@SuppressWarnings("serial")
private volatile java.lang.Object labelFingerprint_ = "";
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return Whether the labelFingerprint field is set.
*/
@java.lang.Override
public boolean hasLabelFingerprint() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return The labelFingerprint.
*/
@java.lang.Override
public java.lang.String getLabelFingerprint() {
java.lang.Object ref = labelFingerprint_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
labelFingerprint_ = s;
return s;
}
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return The bytes for labelFingerprint.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLabelFingerprintBytes() {
java.lang.Object ref = labelFingerprint_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
labelFingerprint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LABELS_FIELD_NUMBER = 500195327;
private static final class LabelsDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionSetLabelsRequest_LabelsEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
public int getLabelsCount() {
return internalGetLabels().getMap().size();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 178124825, labelFingerprint_);
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 500195327);
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.GeneratedMessageV3.computeStringSize(178124825, labelFingerprint_);
}
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
internalGetLabels().getMap().entrySet()) {
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
LabelsDefaultEntryHolder.defaultEntry
.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(500195327, labels__);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.RegionSetLabelsRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.RegionSetLabelsRequest other =
(com.google.cloud.compute.v1.RegionSetLabelsRequest) obj;
if (hasLabelFingerprint() != other.hasLabelFingerprint()) return false;
if (hasLabelFingerprint()) {
if (!getLabelFingerprint().equals(other.getLabelFingerprint())) return false;
}
if (!internalGetLabels().equals(other.internalGetLabels())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasLabelFingerprint()) {
hash = (37 * hash) + LABEL_FINGERPRINT_FIELD_NUMBER;
hash = (53 * hash) + getLabelFingerprint().hashCode();
}
if (!internalGetLabels().getMap().isEmpty()) {
hash = (37 * hash) + LABELS_FIELD_NUMBER;
hash = (53 * hash) + internalGetLabels().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionSetLabelsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.compute.v1.RegionSetLabelsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionSetLabelsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RegionSetLabelsRequest)
com.google.cloud.compute.v1.RegionSetLabelsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionSetLabelsRequest_descriptor;
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 500195327:
return internalGetLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
int number) {
switch (number) {
case 500195327:
return internalGetMutableLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionSetLabelsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RegionSetLabelsRequest.class,
com.google.cloud.compute.v1.RegionSetLabelsRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.RegionSetLabelsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
labelFingerprint_ = "";
internalGetMutableLabels().clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionSetLabelsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionSetLabelsRequest getDefaultInstanceForType() {
return com.google.cloud.compute.v1.RegionSetLabelsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionSetLabelsRequest build() {
com.google.cloud.compute.v1.RegionSetLabelsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionSetLabelsRequest buildPartial() {
com.google.cloud.compute.v1.RegionSetLabelsRequest result =
new com.google.cloud.compute.v1.RegionSetLabelsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.compute.v1.RegionSetLabelsRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.labelFingerprint_ = labelFingerprint_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.RegionSetLabelsRequest) {
return mergeFrom((com.google.cloud.compute.v1.RegionSetLabelsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.RegionSetLabelsRequest other) {
if (other == com.google.cloud.compute.v1.RegionSetLabelsRequest.getDefaultInstance())
return this;
if (other.hasLabelFingerprint()) {
labelFingerprint_ = other.labelFingerprint_;
bitField0_ |= 0x00000001;
onChanged();
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
bitField0_ |= 0x00000002;
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1424998602:
{
labelFingerprint_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 1424998602
case -293404678:
{
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
input.readMessage(
LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
extensionRegistry);
internalGetMutableLabels()
.getMutableMap()
.put(labels__.getKey(), labels__.getValue());
bitField0_ |= 0x00000002;
break;
} // case -293404678
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object labelFingerprint_ = "";
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return Whether the labelFingerprint field is set.
*/
public boolean hasLabelFingerprint() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return The labelFingerprint.
*/
public java.lang.String getLabelFingerprint() {
java.lang.Object ref = labelFingerprint_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
labelFingerprint_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return The bytes for labelFingerprint.
*/
public com.google.protobuf.ByteString getLabelFingerprintBytes() {
java.lang.Object ref = labelFingerprint_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
labelFingerprint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @param value The labelFingerprint to set.
* @return This builder for chaining.
*/
public Builder setLabelFingerprint(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
labelFingerprint_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @return This builder for chaining.
*/
public Builder clearLabelFingerprint() {
labelFingerprint_ = getDefaultInstance().getLabelFingerprint();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The fingerprint of the previous set of labels for this resource, used to detect conflicts. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint.
* </pre>
*
* <code>optional string label_fingerprint = 178124825;</code>
*
* @param value The bytes for labelFingerprint to set.
* @return This builder for chaining.
*/
public Builder setLabelFingerprintBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
labelFingerprint_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetMutableLabels() {
if (labels_ == null) {
labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
}
if (!labels_.isMutable()) {
labels_ = labels_.copy();
}
bitField0_ |= 0x00000002;
onChanged();
return labels_;
}
public int getLabelsCount() {
return internalGetLabels().getMap().size();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public Builder clearLabels() {
bitField0_ = (bitField0_ & ~0x00000002);
internalGetMutableLabels().getMutableMap().clear();
return this;
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
public Builder removeLabels(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
internalGetMutableLabels().getMutableMap().remove(key);
return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
bitField0_ |= 0x00000002;
return internalGetMutableLabels().getMutableMap();
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
public Builder putLabels(java.lang.String key, java.lang.String value) {
if (key == null) {
throw new NullPointerException("map key");
}
if (value == null) {
throw new NullPointerException("map value");
}
internalGetMutableLabels().getMutableMap().put(key, value);
bitField0_ |= 0x00000002;
return this;
}
/**
*
*
* <pre>
* The labels to set for this resource.
* </pre>
*
* <code>map<string, string> labels = 500195327;</code>
*/
public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
internalGetMutableLabels().getMutableMap().putAll(values);
bitField0_ |= 0x00000002;
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RegionSetLabelsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RegionSetLabelsRequest)
/* Shared immutable default instance of RegionSetLabelsRequest. */
private static final com.google.cloud.compute.v1.RegionSetLabelsRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.RegionSetLabelsRequest();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.compute.v1.RegionSetLabelsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/*
 * Wire-format parser. On a parse failure the partially built message is
 * attached to the thrown InvalidProtocolBufferException so callers can
 * inspect what was decoded before the error.
 */
private static final com.google.protobuf.Parser<RegionSetLabelsRequest> PARSER =
    new com.google.protobuf.AbstractParser<RegionSetLabelsRequest>() {
      @java.lang.Override
      public RegionSetLabelsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the static parser for this message type. */
public static com.google.protobuf.Parser<RegionSetLabelsRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RegionSetLabelsRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionSetLabelsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/nosql | 35,272 | kvmain/src/main/java/oracle/kv/impl/xregion/agent/BaseTableTransferThread.java | /*-
* Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package oracle.kv.impl.xregion.agent;
import static oracle.kv.impl.util.CommonLoggerUtils.exceptionString;
import static oracle.kv.impl.xregion.service.JsonConfig.DEFAULT_BATCH_SIZE_PER_REQUEST;
import static oracle.kv.impl.xregion.service.JsonConfig.DEFAULT_ROWS_REPORT_PROGRESS_INTV;
import static oracle.kv.impl.xregion.service.JsonConfig.DEFAULT_THREADS_TABLE_ITERATOR;
import static oracle.kv.impl.xregion.stat.TableInitStat.TableInitState.COMPLETE;
import static oracle.kv.impl.xregion.stat.TableInitStat.TableInitState.ERROR;
import static oracle.kv.impl.xregion.stat.TableInitStat.TableInitState.IN_PROGRESS;
import static oracle.kv.impl.xregion.stat.TableInitStat.TableInitState.NOT_START;
import static oracle.kv.impl.xregion.stat.TableInitStat.TableInitState.SHUTDOWN;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import oracle.kv.Consistency;
import oracle.kv.Direction;
import oracle.kv.FaultException;
import oracle.kv.MetadataNotFoundException;
import oracle.kv.StoreIteratorException;
import oracle.kv.impl.api.table.TableAPIImpl;
import oracle.kv.impl.api.table.TableImpl;
import oracle.kv.impl.test.ExceptionTestHook;
import oracle.kv.impl.test.ExceptionTestHookExecute;
import oracle.kv.impl.test.TestHook;
import oracle.kv.impl.test.TestHookExecute;
import oracle.kv.impl.util.Pair;
import oracle.kv.impl.util.ThreadUtils;
import oracle.kv.impl.util.UserDataControl;
import oracle.kv.impl.util.server.LoggerUtils;
import oracle.kv.impl.xregion.init.TableInitCheckpoint;
import oracle.kv.impl.xregion.service.MRTableMetrics;
import oracle.kv.impl.xregion.service.RegionInfo;
import oracle.kv.impl.xregion.service.ServiceMDMan;
import oracle.kv.impl.xregion.stat.TableInitStat;
import oracle.kv.table.PrimaryKey;
import oracle.kv.table.Row;
import oracle.kv.table.Table;
import oracle.kv.table.TableIterator;
import oracle.kv.table.TableIteratorOptions;
import com.sleepycat.je.utilint.StoppableThread;
/**
* Object represents the base class of table transfer thread
*/
abstract public class BaseTableTransferThread extends StoppableThread {
/**
* A test hook that can be used by test during table transfer, the
* parameter is pair of 1) the name of table being transferred and 2) the
* number of transferred rows in that table.
*/
public static volatile TestHook<Pair<String, Long>>
transInProgressHook = null;
/**
* unit test only, test hook to generate failure. The hook will be called
* after writing a transferred row from source to the target store. The
* arguments pass the number of transferred rows and exception thrown if
* the hook is fired.
*/
public static volatile ExceptionTestHook<Long, Exception> expHook = null;
/** soft shutdown waiting time in ms */
private static final int SOFT_SHUTDOWN_WAIT_MS = 5000;
/** time interval in ms to report table initialization progress */
private static final long TABLE_INIT_REPORT_INTV_MS = 30 * 60 * 1000;
/** private logger */
protected final Logger logger;
/** parent region agent */
protected final RegionAgentThread parent;
/** local table md to initialize, can be refreshed */
protected volatile TableImpl table;
/** source region */
protected final RegionInfo srcRegion;
/** cause of failure */
protected volatile Exception cause;
/** if the thread is requested to shut down */
private final AtomicBoolean shutdownReq;
/** true if transfer is complete */
protected volatile boolean complete;
/** source Table API */
private final TableAPIImpl srcAPI;
/**
* table iterator option
*/
private final TableIteratorOptions iter_opt;
/**
* checkpoint primary key to resume table scan, or an empty key
* {@link Table#createPrimaryKey()} for a full table scan. If null,
* either the checkpoint is corrupted or the table initialization is
* done, in either case there is no need to resume the table copy.
*/
private volatile PrimaryKey ckptKey;
/**
* the table to scan
*/
private final Table srcTable;
/**
* unit test only
*/
private volatile BaseRegionAgentMetrics metrics = null;
/**
* last primary persisted to target store
*/
private volatile PrimaryKey lastPersistPkey;
/**
* True if check redundant table transfer
*/
private volatile boolean checkRedundant;
/**
* Time to start table transfer
*/
private volatile long startTime;
/**
 * Constructs an instance of table transfer thread.
 *
 * @param threadName thread name
 * @param parent     parent region agent, or null in some unit tests
 * @param table      local table to transfer into
 * @param srcRegion  source region to transfer table from
 * @param srcAPI     source region table api
 * @param logger     private logger
 *
 * @throws IllegalStateException if the cached remote table is missing, or
 *         its full namespace name does not match the requested table
 */
public BaseTableTransferThread(String threadName,
                               RegionAgentThread parent,
                               Table table,
                               RegionInfo srcRegion,
                               TableAPIImpl srcAPI,
                               Logger logger) {
    super(threadName);
    this.parent = parent;
    this.table = (TableImpl) table;
    this.srcRegion = srcRegion;
    this.srcAPI = srcAPI;
    this.logger = logger;
    final String tableName = table.getFullNamespaceName();
    shutdownReq = new AtomicBoolean(false);
    complete = false;
    cause = null;
    /* log uncaught failures of this thread with full table/region context */
    setUncaughtExceptionHandler(new ExceptionHandler());
    if (parent == null) {
        /* unit test only */
        srcTable = srcAPI.getTable(tableName);
    } else {
        /*
         * the region agent starts up a table transfer only after the
         * table has been ensured to exist at remote region, therefore,
         * the table instance must be cached.
         */
        srcTable = parent.getMdMan().getRemoteTable(srcRegion.getName(),
                                                    tableName);
    }
    if (srcTable == null) {
        /*
         * We have ensured existence of remote table, the source table
         * must have been cached and cannot be null. We leave detecting
         * the missing remote table to the time when the table is being
         * transferred. If the source table is dropped at that time, we
         * would encounter {@link oracle.kv.StoreIteratorException}
         */
        final String err = "Cached remote table not found" +
                           ", table=" + tableName +
                           ", region=" + srcRegion.getName();
        logger.warning(lm(err));
        throw new IllegalStateException(err);
    }
    /* verify that the table from remote region has the right name */
    if (!tableName.equals(srcTable.getFullNamespaceName())) {
        final String err =
            "Source table=" + srcTable.getFullNamespaceName() +
            " does not match the requested table=" + tableName;
        throw new IllegalStateException(err);
    }
    ckptKey = null;
    iter_opt = new TableIteratorOptions(
        Direction.FORWARD,
        Consistency.ABSOLUTE,
        /* iterator timeout upper bounded by store read timeout */
        srcAPI.getStore().getReadTimeoutMs(),
        TimeUnit.MILLISECONDS,
        getTableThreads(),
        getTableBatchSz());
    /* always include tombstones in MR table initialization */
    iter_opt.setIncludeTombstones();
    /* without metrics (unit test only) skip the stat-dependent init below */
    final BaseRegionAgentMetrics currentMetrics = getMetrics();
    if (currentMetrics == null) {
        /* unit test only */
        return;
    }
    checkRedundant = true;
    startTime = 0;
}
/**
 * Returns table id to identify the table initialization checkpoint (TIC).
 * For MR table, the table id is the target (local) table id. For PITR,
 * table id is the source (local) table id; there is no target table for
 * PITR.
 *
 * @return table id of local table
 */
abstract protected long getTICTableId();
/**
 * Pushes the row from the source region to the target store.
 *
 * @param srcRow row of source region
 */
abstract protected void pushRow(Row srcRow);
/**
 * Returns the stat summary of the thread, for logging.
 *
 * @return the stat summary of the thread
 */
abstract protected String dumpStat();
/**
 * Prefixes a log message with this thread's name so concurrent transfers
 * can be told apart in a shared log.
 *
 * @param msg log message
 * @return log message with the thread-name prefix
 */
protected String lm(String msg) {
    final StringBuilder sb = new StringBuilder("[");
    sb.append(getName()).append("] ").append(msg);
    return sb.toString();
}
/** Returns the logger used by the {@code StoppableThread} machinery. */
@Override
protected Logger getLogger() {
    return logger;
}
/**
 * Main transfer loop. Verifies that this agent owns the table, resolves
 * the resume key from the table-init checkpoint, then copies the table,
 * retrying indefinitely on transient failures (FaultException, retryable
 * StoreIteratorException) until the copy completes, shutdown is
 * requested, or a hard failure occurs. On exit it records the final
 * state (COMPLETE, ERROR or SHUTDOWN) in stats and the checkpoint table.
 */
@Override
public void run() {
    logger.info(lm("Start transfer thread for table=" +
                   ServiceMDMan.getTrace(table) +
                   " from region=" + srcRegion.getName()));
    startTime = System.currentTimeMillis();
    final String region = srcRegion.getName();
    final String tableName = table.getFullNamespaceName();
    final long tableId = table.getId();
    /* first check if the table should be initialized by this agent */
    if (parent != null && !parent.belongsToMe(tableName, region)) {
        complete = true;
        /* table belongs to another agent */
        logger.info(lm("Transfer exits for table=" + tableName +
                       " region=" + region));
        return;
    }
    if (parent != null) {
        logger.info(lm("Table=" + tableName + " region=" + region +
                       " belongs to me=" + parent.getAgentSubscriberId()));
    }
    if (checkRedundant && isRedundantTransfer()) {
        logger.info(lm("Skip transfer because there is already an " +
                       "existing transfer thread for table=" +
                       tableName + " from region=" + region));
        return;
    }
    try {
        ckptKey = getCheckpointKey();
        if (ckptKey == null) {
            /* init already complete or checkpoint gone: nothing to do */
            logger.info(lm("Skip transfer for table=" + tableName));
            return;
        }
    } catch (CheckpointKeyException exp) {
        cause = exp;
        final String msg =
            "Fail to get checkpoint key for table=" + tableName +
            " from region=" + srcRegion.getName() +
            ", error=" + exp + ", stack trace\n" +
            LoggerUtils.getStackTrace(exp);
        logger.warning(lm(msg));
        return;
    }
    /* set start time */
    getMetrics().getTableMetrics(tableName)
                .getRegionInitStat(region)
                .setTransferStartMs(startTime);
    int attempts = 0;
    /* remember the exception encountered */
    Exception error = null;
    /*
     * the thread will indefinitely retry until transfer is complete, or
     * service shuts down, or encounters unexpected failure
     */
    while (!complete && !shutdownRequested()) {
        final long tid = (parent == null) ? 0 /* unit test only*/ :
            tableId;
        /* start time of this attempt */
        final long ts = System.currentTimeMillis();
        logger.info(lm("Starting transfer table=" + tableName +
                       "(id=" + tid +
                       ", remote id=" + ((TableImpl) srcTable).getId() +
                       ") from region=" + region +
                       (isFullScan(srcTable, ckptKey) ? ", full scan" :
                        ", from checkpoint key hash=" +
                        getKeyHash(ckptKey)) + ", attempts=" + attempts));
        try {
            transTable(++attempts, ts);
        } catch (StoreIteratorException sie) {
            final Throwable err = sie.getCause();
            if (err instanceof MetadataNotFoundException) {
                /* remote table dropped mid-scan: recreate, then fail */
                procMissingRemoteTable();
                error = sie;
                break;
            }
            /* retry on other cases */
            retryLog(sie, attempts, ts);
        } catch (FaultException fe) {
            /* always retry */
            retryLog(fe, attempts, ts);
        } catch (MetadataNotFoundException exp) {
            logger.warning(lm("Fail to transfer table=" + tableName +
                              " not found at local region, error=" + exp));
            error = exp;
            break;
        } catch (Exception exp) {
            /* no retry on hard failures */
            if (!shutdownRequested() &&
                (parent != null && !parent.isShutdownRequested())) {
                error = exp;
            }
            break;
        }
    }
    /* success */
    if (parent == null) {
        /*
         * in some unit test without parent region agent, no need to
         * update the stats and checkpoint below
         */
        return;
    }
    /* normal case */
    final ServiceMDMan mdMan = parent.getMdMan();
    if (complete) {
        /* update table init checkpoint to complete */
        mdMan.writeCkptRetry(region, tableName, getTICTableId(), null,
                             COMPLETE);
        getMetrics().getTableMetrics(tableName).getRegionInitStat(region)
                    .setTransferCompleteMs(System.currentTimeMillis());
        logger.info(lm("Complete transferring table=" +
                       ServiceMDMan.getTrace(table) +
                       ", remote id=" +
                       mdMan.getRemoteTableId(region, tableName) +
                       " in attempts=" + attempts +
                       ", shutdown=" + shutdownRequested()));
        logger.fine(() -> lm(dumpStat()));
        return;
    }
    /* unexpected failure, no retry */
    if (error != null) {
        getMetrics().getTableMetrics(tableName).getRegionInitStat(region)
                    .setState(ERROR);
        mdMan.writeTableInitCkpt(region, tableName, getTICTableId(),
                                 lastPersistPkey, ERROR,
                                 error.getMessage());
        final String msg = "Fail to copy table=" + tableName +
                           " from region=" + srcRegion.getName() +
                           " in attempts=" + attempts +
                           ", shutdown=" + shutdownRequested() +
                           ", error=" + error +
                           ", " + dumpStat() +
                           (logger.isLoggable(Level.FINE) ? "\n" +
                            LoggerUtils.getStackTrace(error) : "");
        logger.warning(lm(msg));
        /*
         * finally, we set cause to let RegionAgentThread to catch the
         * failure, note that the RegionAgentThread can only catch the
         * failure after the checkpoint with ERROR state is persistent.
         * Otherwise, we might lose the error checkpoint.
         */
        cause = error;
        return;
    }
    /* shutdown requested */
    getMetrics().getTableMetrics(tableName).getRegionInitStat(region)
                .setState(SHUTDOWN);
    logger.info(lm("Shutdown requested before completing" +
                   " transfer table=" + tableName +
                   " in attempts=" + attempts));
    logger.fine(() -> lm(dumpStat()));
}
/**
 * Signals a soft shutdown and returns how long the caller should wait
 * for this thread to exit on its own.
 */
@Override
protected int initiateSoftShutdown() {
    final int waitMs = SOFT_SHUTDOWN_WAIT_MS;
    logger.fine(() -> lm("Signal thread=" + getName() + " to shutdown" +
                         ", wait for time(ms)=" + waitMs +
                         " to exit"));
    return waitMs;
}
/**
 * Shuts down the transfer thread. Only the first caller performs the
 * shutdown; later calls are no-ops.
 */
public void shutdown() {
    final boolean firstCall = shutdownReq.compareAndSet(false, true);
    if (!firstCall) {
        logger.fine(() -> lm("Shutdown already signalled"));
        return;
    }
    final long start = System.currentTimeMillis();
    shutdownThread(logger);
    final long elapsedMs = System.currentTimeMillis() - start;
    logger.info(lm("Thread=" + getName() + " has shut down in time(ms)=" +
                   elapsedMs));
}
/**
 * Gets the start timestamp of the transfer, or 0 if not started.
 *
 * @return the start timestamp, or 0 if not started
 */
public long getStartTime() {
    return startTime;
}
/**
 * Gets the cause of failure, or null if transfer is complete or shutdown
 * by the parent agent.
 *
 * @return the cause of failure, or null
 */
public Exception getCause() {
    return cause;
}
/**
 * Returns the local table being initialized.
 */
public Table getTable() {
    return table;
}
/**
 * Returns true if the transfer is complete, false otherwise.
 *
 * @return true if the transfer is complete, false otherwise
 */
public boolean isComplete() {
    return complete;
}
/**
 * Returns true if the transfer has failed, false otherwise.
 *
 * @return true if the transfer has failed, false otherwise
 */
public boolean hasFailed() {
    return cause != null;
}
/**
 * Decides whether to log initialization progress: report when progress
 * has been made since the last report AND either the report interval has
 * elapsed or enough additional rows have been transferred.
 *
 * @param rowsTrans # rows already transferred
 * @param lastRows  # of rows transferred in last report
 * @param lastTs    timestamp of last report
 *
 * @return true if it needs to report progress, false otherwise
 */
private boolean reportProgress(long rowsTrans, long lastRows, long lastTs) {
    if (noProgress(rowsTrans, lastRows)) {
        return false;
    }
    final long sinceLastMs = System.currentTimeMillis() - lastTs;
    return sinceLastMs > TABLE_INIT_REPORT_INTV_MS ||
           (rowsTrans - lastRows) > getTableReportIntv();
}
/**
 * Returns true if no rows have been transferred at all, or the count has
 * not advanced since the last report.
 *
 * @param rowsTrans    # rows already transferred
 * @param lastReported # of rows transferred in last report
 * @return true if no progress has been made since last report
 */
private boolean noProgress(long rowsTrans, long lastReported) {
    if (rowsTrans == 0) {
        return true;
    }
    return rowsTrans == lastReported;
}
/**
 * Transfer table from source region. The transfer will start from a
 * given start key each time and update the checkpoint position during
 * transfer. It returns when
 * - transfer is complete, or
 * - shutdown is requested, or
 * - transfer fails because an exception is thrown
 *
 * @param attempts # of attempts
 * @param ts start time of this attempt
 */
private void transTable(int attempts, long ts) {
    TableIterator<Row> iterator = null;
    long lastReportedRows = 0; /* # of transferred rows in last report */
    long lastReportTs = 0; /* timestamp of last report */
    final String tableName = table.getFullNamespaceName();
    try {
        complete = false;
        /* set the resume key if not full table scan */
        if (ckptKey != null && !isFullScan(srcTable, ckptKey)) {
            iter_opt.setResumePrimaryKey(ckptKey);
            logger.info(lm("Set resume key hash=" + getKeyHash(ckptKey) +
                           " for table=" + tableName +
                           "(id=" + ((TableImpl) srcTable).getId() + ")"));
        }
        /* create iterator from given start key */
        iterator = srcAPI.tableIterator(
            srcTable.createPrimaryKey(), null, iter_opt);
        /* rows transferred in this attempt */
        long rows = 0;
        while (!shutdownRequested() && iterator.hasNext()) {
            final Row srcRow = iterator.next();
            pushRow(srcRow);
            /* remember last pushed key as the checkpoint position */
            lastPersistPkey = srcRow.createPrimaryKey();
            rows++;
            /* report progress */
            if (reportProgress(rows, lastReportedRows, lastReportTs)) {
                /* log table transfer stats */
                lastReportedRows = rows;
                lastReportTs = System.currentTimeMillis();
                logTableInitStat(attempts, ts, rows);
                if (parent != null) {
                    /* update checkpoint if not unit test */
                    doCheckpoint();
                }
            }
            /* unit test only: simulate failure */
            fireTestHookInTransfer(rows);
        }
        if (!shutdownRequested()) {
            /* iterator exhausted without shutdown: the copy is done */
            complete = true;
        }
    } finally {
        /* always release iterator resources, even on failure */
        if (iterator != null) {
            iterator.close();
        }
    }
}
/**
 * Persists a table-initialization checkpoint at the last primary key
 * pushed to the target store, with state IN_PROGRESS. No-op when no row
 * has been transferred yet.
 */
public void doCheckpoint() {
    if (lastPersistPkey == null) {
        /* did not transfer any row, no checkpoint */
        return;
    }
    parent.getMdMan().writeTableInitCkpt(srcRegion.getName(),
                                         table.getFullNamespaceName(),
                                         getTICTableId(),
                                         lastPersistPkey, IN_PROGRESS,
                                         null);
}
/** Returns the table-iterator thread count from config (test default if no parent). */
private int getTableThreads() {
    return (parent == null)
        ? DEFAULT_THREADS_TABLE_ITERATOR /* unit test */
        : parent.getMdMan().getJsonConf().getTableThreads();
}
/** Returns the per-request batch size from config (test default if no parent). */
private int getTableBatchSz() {
    return (parent == null)
        ? DEFAULT_BATCH_SIZE_PER_REQUEST /* unit test */
        : parent.getMdMan().getJsonConf().getTableBatchSz();
}
/** Returns the progress-report row interval from config (test default if no parent). */
private int getTableReportIntv() {
    return (parent == null)
        ? DEFAULT_ROWS_REPORT_PROGRESS_INTV /* unit test */
        : parent.getMdMan().getJsonConf().getTableReportIntv();
}
/*
 * The metrics reference may be replaced when an interval stat is
 * collected, so always fetch it from the parent instead of caching a
 * constant reference.
 */
protected BaseRegionAgentMetrics getMetrics() {
    return (parent == null) ? metrics /* unit test only */
                            : parent.getMetrics();
}
/**
 * unit test only: overrides the metrics used when there is no parent
 */
protected void setMetrics(BaseRegionAgentMetrics val) {
    metrics = val;
}
/**
 * Fires test hooks, unit test only. The hooks run only when assertions
 * are enabled; any exception raised by a hook is rethrown as
 * IllegalStateException (with cause preserved) to fail the transfer.
 *
 * @param rows # rows transferred
 */
private void fireTestHookInTransfer(long rows) {
    final String tableName = table.getFullNamespaceName();
    try {
        /* test hook to throw exception */
        assert ExceptionTestHookExecute.doHookIfSet(expHook, rows);
        /* test hook in transfer */
        assert TestHookExecute.doHookIfSet(transInProgressHook,
                                           new Pair<>(tableName, rows));
    } catch (Exception exp) {
        final String err = exp.getMessage();
        logger.warning(lm("TEST ONLY: cause=" + err));
        throw new IllegalStateException(err, exp);
    }
}
/**
 * Returns a printable hash of the given primary key for logging, or the
 * literal string "null" when the key is null. Hashing via
 * UserDataControl avoids exposing user key data in log files.
 *
 * @param key primary key to hash, may be null
 * @return hash string suitable for logging
 */
private String getKeyHash(PrimaryKey key) {
    if (key == null) {
        return "null";
    }
    /*
     * use an explicit charset: the no-arg getBytes() depends on the
     * platform default and could hash the same key differently on
     * different hosts
     */
    return UserDataControl.getHash(
        key.toJsonString(false).getBytes(StandardCharsets.UTF_8));
}
/**
 * Returns true if the given key equals the table's empty primary key,
 * i.e. the scan covers the whole table.
 */
private boolean isFullScan(Table tb, PrimaryKey primaryKey) {
    final PrimaryKey emptyKey = tb.createPrimaryKey();
    return primaryKey.equals(emptyKey);
}
/**
 * Builds the resume primary key from a checkpoint: an empty key (full
 * table scan) when the checkpoint state is NOT_START, otherwise the key
 * stored in the checkpoint as JSON.
 *
 * @param ckpt table initialization checkpoint
 * @return primary key to resume the scan from
 */
private PrimaryKey buildPrimaryKey(TableInitCheckpoint ckpt) {
    PrimaryKey ret;
    if (NOT_START.equals(ckpt.getState())) {
        ret = srcTable.createPrimaryKey();
    } else {
        final String json = ckpt.getPrimaryKey();
        ret = srcTable.createPrimaryKeyFromJson(json, true);
    }
    final String tableName = table.getFullNamespaceName();
    logger.info(lm("Table=" + tableName +
                   (isFullScan(srcTable, ret) ?
                    " requires full scan" :
                    " resumes from key hash=" + getKeyHash(ret))));
    return ret;
}
/**
 * Reads the primary key from checkpoint table.
 *
 * @return primary key to resume scan, or null if table initialization is
 * complete, or the checkpoint is gone in the checkpoint table.
 * @throws CheckpointKeyException if unable to read the checkpoint
 */
private PrimaryKey getCheckpointKey() throws CheckpointKeyException {
    /* build primary key from checkpoint, or a full table scan */
    if (parent == null) {
        /* some unit test only, always full table scan */
        return srcTable.createPrimaryKey();
    }
    /* normal case */
    final String region = srcRegion.getName();
    final String tableName = table.getFullNamespaceName();
    final TableInitCheckpoint ckpt;
    try {
        ckpt = readCheckpoint(tableName, region);
    } catch (IllegalStateException exp) {
        final String err = "Cannot read checkpoint for table=" + tableName +
                           ", error=" + exceptionString(exp);
        throw new CheckpointKeyException(err, exp);
    }
    if (ckpt == null) {
        final String msg = "No checkpoint for table=" + tableName +
                           ", table might be dropped from region=" +
                           srcRegion.getName();
        logger.info(lm(msg));
        return null;
    }
    /* verify the checkpoint has matching table id */
    if (ckpt.getTableId() != getTICTableId()) {
        /* stale checkpoint from an earlier table incarnation: reset it */
        final String msg = "Table id=" + ckpt.getTableId() +
                           " in checkpoint for table=" + tableName +
                           " does not match expected id=" +
                           getTICTableId() +
                           ", reset table init checkpoint";
        parent.resetInitCkpt(Collections.singleton(table));
        logger.info(lm(msg));
        /* full scan */
        return srcTable.createPrimaryKey();
    }
    if (COMPLETE.equals(ckpt.getState())) {
        final String msg = "Skip transfer because initialization " +
                           "of table=" + tableName + " is already " +
                           "complete";
        logger.info(lm(msg));
        return null;
    }
    /* get the primary key from checkpoint */
    return buildPrimaryKey(ckpt);
}
/**
 * Reads the table initialization checkpoint. It first reads the
 * checkpoint made by this agent. If that does not exist, it reads the
 * checkpoint from another agent; when one is found, a new checkpoint
 * owned by this agent is persisted to replace it and the stale copies
 * are deleted.
 *
 * @param tableName table name
 * @param region region name
 * @return table initialization checkpoint, or null if none exists
 */
private TableInitCheckpoint readCheckpoint(String tableName,
                                           String region) {
    /* read checkpoint from myself */
    TableInitCheckpoint ckpt =
        parent.getMdMan().readTableInitCkpt(region, tableName);
    if (ckpt == null) {
        /* make another attempt to read checkpoint from other agents */
        ckpt = parent.getMdMan().readCkptAnyAgent(tableName, region);
    }
    if (ckpt == null) {
        logger.info(lm("Cannot find any checkpoint for table=" + tableName +
                       " from region=" + region));
        return null;
    }
    final boolean fromMe =
        ckpt.getAgentId().equals(parent.getAgentSubscriberId().toString());
    logger.info(lm("Found checkpoint made by " +
                   (fromMe ? "myself=" : "another agent=") +
                   ckpt.getAgentId() + ", ckpt="+ ckpt));
    /*
     * if the checkpoint from another agent, do clean up and persist a
     * new checkpoint with my agent id
     */
    if (!fromMe) {
        /* write a new checkpoint */
        final PrimaryKey pkey = buildPrimaryKey(ckpt);
        parent.getMdMan().writeTableInitCkpt(region,
                                             tableName,
                                             getTICTableId(),
                                             pkey,
                                             ckpt.getState(),
                                             ckpt.getMessage());
        final TableInitCheckpoint tic =
            parent.getMdMan().readTableInitCkpt(region, tableName);
        logger.info(lm("Refresh existing checkpoint with my agentId=" +
                       parent.getAgentSubscriberId() + ", ckpt=" + tic));
        /* delete all stale checkpoints from other agents */
        parent.getMdMan().delInitCkpt(region, tableName,
                                      true/* keep my own checkpoint */);
        /*
         * NOTE(review): the original checkpoint object (not the
         * refreshed copy) is returned; state and resume key are
         * equivalent for the caller
         */
    }
    return ckpt;
}
/**
 * Exception thrown when unable to get the primary key from the checkpoint
 */
private static class CheckpointKeyException extends IllegalStateException {
    private static final long serialVersionUID = 1;

    CheckpointKeyException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
/**
 * Uncaught exception handler: logs the failure with full thread and
 * table context. It does not set {@code cause}; failure reporting is
 * handled in {@code run()}.
 */
private class ExceptionHandler implements UncaughtExceptionHandler {
    @Override
    public void uncaughtException(Thread t, Throwable e) {
        logger.warning(lm("Uncaught exception in transfer thread for" +
                          " table=" + ServiceMDMan.getTrace(table) +
                          " from region=" + srcRegion.getName() +
                          ", thread=" + t.getName() +
                          ", id=" + ThreadUtils.threadId(t) +
                          ", error=" + e +
                          "\n" + LoggerUtils.getStackTrace(e)));
    }
}
/**
 * Skips check redundant table transfer. The polling thread should call
 * it to disable the check, because a transfer thread submitted by the
 * polling thread cannot be redundant.
 */
protected void skipCheckRedundant() {
    checkRedundant = false;
}
/**
 * Returns true if this table is already scheduled for transfer by the
 * polling thread, making this transfer redundant.
 *
 * Since the region agent only transfers one table at a time, when this
 * thread is scheduled to run all previous transfers have either
 * completed or terminated with errors, and the checkpoint table records
 * those outcomes; the only live check needed is whether the polling
 * thread has the table scheduled. Without a parent (unit test only) the
 * transfer is never redundant.
 */
private boolean isRedundantTransfer() {
    return parent != null && parent.inPolling(table);
}
/**
 * Logs a transient table-copy failure that will be retried, including
 * attempt count, elapsed time and the resume position.
 *
 * @param exp      failure triggering the retry
 * @param attempts number of attempts made so far
 * @param startTs  start timestamp of the failed attempt
 */
private void retryLog(Throwable exp, int attempts, long startTs) {
    final boolean fullScan = isFullScan(srcTable, ckptKey);
    /* retry transfer */
    final String msg =
        "Unable to copy table=" + ServiceMDMan.getTrace(table) +
        ", remote table=" + ServiceMDMan.getTrace(srcTable) +
        " from region=" + srcRegion.getName() +
        " in attempts=" + attempts +
        "(elapsedMs=" + (System.currentTimeMillis() - startTs) +
        "), error=" + exp +
        ", will retry " +
        (fullScan ? "full scan" :
         /*
          * use the same protected key hash as the other transfer log
          * sites (instead of Object.hashCode()) so the resume position
          * can be correlated across log messages
          */
         "from checkpoint key hash=" + getKeyHash(ckptKey)) +
        /* newline before the stack trace, matching run()'s error log */
        (logger.isLoggable(Level.FINE) ? "\n" +
         LoggerUtils.getStackTrace(exp) : "");
    logger.info(lm(msg));
}
/**
 * Processes a missing remote table: verifies the table info at the
 * source region and asks the parent agent to recreate the remote table.
 */
private void procMissingRemoteTable() {
    final String tableName = table.getFullNamespaceName();
    final String region = srcRegion.getName();
    if (parent.getMdMan().verifyTableInfo(tableName, srcRegion)) {
        logger.fine(() -> lm("Verified table=" +
                             ServiceMDMan.getTrace(table) +
                             " at region=" + region));
    } else {
        logger.info(lm("Fail to verify table=" +
                       ServiceMDMan.getTrace(table) +
                       " at region=" + region));
    }
    try {
        parent.recreateRemoteTable(tableName, "table transfer");
    } catch (InterruptedException e) {
        /*
         * restore the interrupt status so the enclosing thread (and
         * StoppableThread machinery) can observe the interruption
         */
        Thread.currentThread().interrupt();
        logger.info(lm("Interrupted in adding table=" +
                       ServiceMDMan.getTrace(table) +
                       " initialization request, ignore"));
    }
}
/** Returns true if shutdown of this thread has been requested. */
protected boolean shutdownRequested() {
    return shutdownReq.get();
}
/**
 * Returns the initialization stat of the given table for the source
 * region, or null when the table has no metrics.
 */
private TableInitStat getTableInitMetrics(String tableName) {
    final MRTableMetrics tm = getMetrics().getTableMetrics(tableName);
    return (tm == null) ? null : tm.getRegionInitStat(srcRegion.getName());
}
/**
 * Logs table initialization progress for the current attempt: rows
 * transferred, elapsed time, throughput, and interval stats.
 *
 * @param attempt attempt number
 * @param startTs start timestamp of this attempt
 * @param rows    # rows transferred in this attempt
 */
private void logTableInitStat(int attempt, long startTs, long rows) {
    final String tableName = table.getFullNamespaceName();
    final TableInitStat st = getTableInitMetrics(tableName);
    final long elapsedMs = System.currentTimeMillis() - startTs;
    /* clamp to 1 ms so a sub-millisecond attempt does not log Infinity */
    final double throughput = 1000.0 * rows / Math.max(1, elapsedMs);
    final String msg =
        "In transferring table=" + ServiceMDMan.getTrace(table) +
        ", attempts=" + attempt +
        ", elapsedMs=" + elapsedMs +
        ", # rows transferred in this attempt=" + rows +
        ", throughput(rows/sec)=" + throughput +
        ", in current stat report interval: #rows transferred=" +
        (st != null ? st.getTransferRows() : "NA") +
        ", #rows persisted=" +
        (st != null ? st.getPersistRows() : "NA");
    logger.info(lm(msg));
}
}
|
openjdk/jdk8 | 35,331 | jdk/src/share/classes/com/sun/media/sound/SoftPerformer.java | /*
* Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.media.sound;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * This class decodes information from ModelPerformer for use in SoftVoice.
 * It also adds default connections if they were missing in ModelPerformer.
 *
 * @author Karl Helgason
 */
/**
 * A performer compiled for the software synthesizer: wraps a {@link ModelPerformer}
 * together with a deduplicated set of connection blocks and per-source index tables
 * that the voice engine can evaluate quickly at run time.
 */
public final class SoftPerformer {
    // Connection blocks that are active for every performer (GM/DLS-style defaults)
    // unless the performer disables default connections. Filled exactly once below;
    // the array size (42) must match the number of entries in the static initializer.
    static ModelConnectionBlock[] defaultconnections
            = new ModelConnectionBlock[42];
    static {
        int o = 0;
        // Note-on gate drives both envelope generators (eg instance 0 and 1).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("noteon", "on", 0),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1, new ModelDestination(new ModelIdentifier("eg", "on", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("noteon", "on", 0),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1, new ModelDestination(new ModelIdentifier("eg", "on", 1)));
        // Envelope 0 keeps the mixer channel alive while it runs.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("eg", "active", 0),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1, new ModelDestination(new ModelIdentifier("mixer", "active", 0)));
        // Envelope 0 attenuates gain (volume envelope, -960 cB full range).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("eg", 0),
                ModelStandardTransform.DIRECTION_MAX2MIN,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            -960, new ModelDestination(new ModelIdentifier("mixer", "gain")));
        // Note-on velocity attenuates gain with a concave curve.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("noteon", "velocity"),
                ModelStandardTransform.DIRECTION_MAX2MIN,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_CONCAVE),
            -960, new ModelDestination(new ModelIdentifier("mixer", "gain")));
        // Pitch bend, scaled by RPN 0 (pitch-bend sensitivity). The transform
        // decodes the 14-bit RPN value as semitones*100 + cents.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi", "pitch"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            new ModelSource(new ModelIdentifier("midi_rpn", "0"),
                new ModelTransform() {
                    public double transform(double value) {
                        int v = (int) (value * 16384.0);
                        int msb = v >> 7;
                        int lsb = v & 127;
                        return msb * 100 + lsb;
                    }
                }),
            new ModelDestination(new ModelIdentifier("osc", "pitch")));
        // Key number tracks oscillator pitch (12800 cents over the key range).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("noteon", "keynumber"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            12800, new ModelDestination(new ModelIdentifier("osc", "pitch")));
        // cc#7 channel volume (concave attenuation).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "7"),
                ModelStandardTransform.DIRECTION_MAX2MIN,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_CONCAVE),
            -960, new ModelDestination(new ModelIdentifier("mixer", "gain")));
        // cc#8 balance, cc#10 pan.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "8"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1000, new ModelDestination(new ModelIdentifier("mixer", "balance")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "10"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1000, new ModelDestination(new ModelIdentifier("mixer", "pan")));
        // cc#11 expression (concave attenuation).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "11"),
                ModelStandardTransform.DIRECTION_MAX2MIN,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_CONCAVE),
            -960, new ModelDestination(new ModelIdentifier("mixer", "gain")));
        // cc#91 reverb send, cc#93 chorus send.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "91"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1000, new ModelDestination(new ModelIdentifier("mixer", "reverb")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "93"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            1000, new ModelDestination(new ModelIdentifier("mixer", "chorus")));
        // Sound controllers: cc#71 filter resonance, cc#74 brightness (filter cutoff).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "71"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            200, new ModelDestination(new ModelIdentifier("filter", "q")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "74"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            9600, new ModelDestination(new ModelIdentifier("filter", "freq")));
        // Sound controllers: cc#72 release time, cc#73 attack time, cc#75 decay time.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "72"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            6000, new ModelDestination(new ModelIdentifier("eg", "release2")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "73"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            2000, new ModelDestination(new ModelIdentifier("eg", "attack2")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "75"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            6000, new ModelDestination(new ModelIdentifier("eg", "decay2")));
        // cc#67 soft pedal: switch transform lowers gain and closes the filter.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "67"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_SWITCH),
            -50, new ModelDestination(ModelDestination.DESTINATION_GAIN));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_cc", "67"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_UNIPOLAR,
                ModelStandardTransform.TRANSFORM_SWITCH),
            -2400, new ModelDestination(ModelDestination.DESTINATION_FILTER_FREQ));
        // RPN 1 fine tuning (cents) and RPN 2 coarse tuning (semitones).
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_rpn", "1"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            100, new ModelDestination(new ModelIdentifier("osc", "pitch")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("midi_rpn", "2"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            12800, new ModelDestination(new ModelIdentifier("osc", "pitch")));
        // Master fine/coarse tuning applied the same way as the RPNs above.
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("master", "fine_tuning"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            100, new ModelDestination(new ModelIdentifier("osc", "pitch")));
        defaultconnections[o++] = new ModelConnectionBlock(
            new ModelSource(
                new ModelIdentifier("master", "coarse_tuning"),
                ModelStandardTransform.DIRECTION_MIN2MAX,
                ModelStandardTransform.POLARITY_BIPOLAR,
                ModelStandardTransform.TRANSFORM_LINEAR),
            12800, new ModelDestination(new ModelIdentifier("osc", "pitch")));
        // Constant defaults: filter wide open, envelope stages effectively zero
        // (NEGATIVE_INFINITY on a time destination), full sustain.
        defaultconnections[o++] = new ModelConnectionBlock(13500,
            new ModelDestination(new ModelIdentifier("filter", "freq", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "delay", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "attack", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "hold", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "decay", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(1000,
            new ModelDestination(new ModelIdentifier("eg", "sustain", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "release", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(1200.0
            * Math.log(0.015) / Math.log(2), new ModelDestination(
                new ModelIdentifier("eg", "shutdown", 0))); // 15 msec default
        // Same defaults for the second envelope generator (instance 1).
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "delay", 1)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "attack", 1)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "hold", 1)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "decay", 1)));
        defaultconnections[o++] = new ModelConnectionBlock(1000,
            new ModelDestination(new ModelIdentifier("eg", "sustain", 1)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("eg", "release", 1)));
        // LFO defaults: -8.51318 is the default frequency in the destination's
        // log scale (presumably absolute cents — TODO confirm against ModelDestination).
        defaultconnections[o++] = new ModelConnectionBlock(-8.51318,
            new ModelDestination(new ModelIdentifier("lfo", "freq", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("lfo", "delay", 0)));
        defaultconnections[o++] = new ModelConnectionBlock(-8.51318,
            new ModelDestination(new ModelIdentifier("lfo", "freq", 1)));
        defaultconnections[o++] = new ModelConnectionBlock(
            Float.NEGATIVE_INFINITY, new ModelDestination(
                new ModelIdentifier("lfo", "delay", 1)));
    }
    // Key range (inclusive, MIDI note numbers) this performer responds to.
    public int keyFrom = 0;
    public int keyTo = 127;
    // Velocity range (inclusive) this performer responds to.
    public int velFrom = 0;
    public int velTo = 127;
    // Exclusive class copied from the performer; semantics defined by the caller.
    public int exclusiveClass = 0;
    public boolean selfNonExclusive = false;
    // Set by the constructor when some connection block writes to
    // noteon.velocity / noteon.keynumber (such blocks are moved to the front).
    public boolean forcedVelocity = false;
    public boolean forcedKeynumber = false;
    // The performer this object was compiled from.
    public ModelPerformer performer;
    // Final deduplicated connection blocks; blocks targeting "noteon" come first.
    public ModelConnectionBlock[] connections;
    public ModelOscillator[] oscillators;
    // RPN/NRPN number -> indices into `connections` sourced from that (N)RPN.
    public Map<Integer, int[]> midi_rpn_connections = new HashMap<Integer, int[]>();
    public Map<Integer, int[]> midi_nrpn_connections = new HashMap<Integer, int[]>();
    // midi_ctrl_connections[cc] -> indices of blocks sourced from controller cc.
    public int[][] midi_ctrl_connections;
    // Slots (see processMidiSource/processNoteOnSource):
    // 0 = pitch bend, 1 = channel pressure, 2 = poly pressure,
    // 3 = noteon.on, 4 = noteon.keynumber.
    public int[][] midi_connections;
    // Indices of blocks with any other (control-time) source.
    public int[] ctrl_connections;
    private List<Integer> ctrl_connections_list = new ArrayList<Integer>();
private static class KeySortComparator implements Comparator<ModelSource> {
public int compare(ModelSource o1, ModelSource o2) {
return o1.getIdentifier().toString().compareTo(
o2.getIdentifier().toString());
}
}
private static KeySortComparator keySortComparator = new KeySortComparator();
private String extractKeys(ModelConnectionBlock conn) {
StringBuffer sb = new StringBuffer();
if (conn.getSources() != null) {
sb.append("[");
ModelSource[] srcs = conn.getSources();
ModelSource[] srcs2 = new ModelSource[srcs.length];
for (int i = 0; i < srcs.length; i++)
srcs2[i] = srcs[i];
Arrays.sort(srcs2, keySortComparator);
for (int i = 0; i < srcs.length; i++) {
sb.append(srcs[i].getIdentifier());
sb.append(";");
}
sb.append("]");
}
sb.append(";");
if (conn.getDestination() != null) {
sb.append(conn.getDestination().getIdentifier());
}
sb.append(";");
return sb.toString();
}
private void processSource(ModelSource src, int ix) {
ModelIdentifier id = src.getIdentifier();
String o = id.getObject();
if (o.equals("midi_cc"))
processMidiControlSource(src, ix);
else if (o.equals("midi_rpn"))
processMidiRpnSource(src, ix);
else if (o.equals("midi_nrpn"))
processMidiNrpnSource(src, ix);
else if (o.equals("midi"))
processMidiSource(src, ix);
else if (o.equals("noteon"))
processNoteOnSource(src, ix);
else if (o.equals("osc"))
return;
else if (o.equals("mixer"))
return;
else
ctrl_connections_list.add(ix);
}
private void processMidiControlSource(ModelSource src, int ix) {
String v = src.getIdentifier().getVariable();
if (v == null)
return;
int c = Integer.parseInt(v);
if (midi_ctrl_connections[c] == null)
midi_ctrl_connections[c] = new int[]{ix};
else {
int[] olda = midi_ctrl_connections[c];
int[] newa = new int[olda.length + 1];
for (int i = 0; i < olda.length; i++)
newa[i] = olda[i];
newa[newa.length - 1] = ix;
midi_ctrl_connections[c] = newa;
}
}
private void processNoteOnSource(ModelSource src, int ix) {
String v = src.getIdentifier().getVariable();
int c = -1;
if (v.equals("on"))
c = 3;
if (v.equals("keynumber"))
c = 4;
if (c == -1)
return;
if (midi_connections[c] == null)
midi_connections[c] = new int[]{ix};
else {
int[] olda = midi_connections[c];
int[] newa = new int[olda.length + 1];
for (int i = 0; i < olda.length; i++)
newa[i] = olda[i];
newa[newa.length - 1] = ix;
midi_connections[c] = newa;
}
}
private void processMidiSource(ModelSource src, int ix) {
String v = src.getIdentifier().getVariable();
int c = -1;
if (v.equals("pitch"))
c = 0;
if (v.equals("channel_pressure"))
c = 1;
if (v.equals("poly_pressure"))
c = 2;
if (c == -1)
return;
if (midi_connections[c] == null)
midi_connections[c] = new int[]{ix};
else {
int[] olda = midi_connections[c];
int[] newa = new int[olda.length + 1];
for (int i = 0; i < olda.length; i++)
newa[i] = olda[i];
newa[newa.length - 1] = ix;
midi_connections[c] = newa;
}
}
private void processMidiRpnSource(ModelSource src, int ix) {
String v = src.getIdentifier().getVariable();
if (v == null)
return;
int c = Integer.parseInt(v);
if (midi_rpn_connections.get(c) == null)
midi_rpn_connections.put(c, new int[]{ix});
else {
int[] olda = midi_rpn_connections.get(c);
int[] newa = new int[olda.length + 1];
for (int i = 0; i < olda.length; i++)
newa[i] = olda[i];
newa[newa.length - 1] = ix;
midi_rpn_connections.put(c, newa);
}
}
private void processMidiNrpnSource(ModelSource src, int ix) {
String v = src.getIdentifier().getVariable();
if (v == null)
return;
int c = Integer.parseInt(v);
if (midi_nrpn_connections.get(c) == null)
midi_nrpn_connections.put(c, new int[]{ix});
else {
int[] olda = midi_nrpn_connections.get(c);
int[] newa = new int[olda.length + 1];
for (int i = 0; i < olda.length; i++)
newa[i] = olda[i];
newa[newa.length - 1] = ix;
midi_nrpn_connections.put(c, newa);
}
}
    /**
     * Compiles a {@link ModelPerformer} into its runtime form: merges the
     * performer's connection blocks with the synthesizer defaults, synthesizes
     * extra blocks for standard controllers (modulation depth, aftertouch,
     * vibrato controllers), deduplicates by canonical key, and builds the
     * per-source index tables used by the voice engine.
     *
     * @param performer the performer to compile; key/velocity ranges and
     *                  exclusive-class settings are copied into public fields
     */
    public SoftPerformer(ModelPerformer performer) {
        this.performer = performer;
        keyFrom = performer.getKeyFrom();
        keyTo = performer.getKeyTo();
        velFrom = performer.getVelFrom();
        velTo = performer.getVelTo();
        exclusiveClass = performer.getExclusiveClass();
        selfNonExclusive = performer.isSelfNonExclusive();
        // connmap deduplicates connection blocks keyed by extractKeys(); a later
        // put with the same key replaces an earlier (lower-priority) block.
        Map<String, ModelConnectionBlock> connmap = new HashMap<String, ModelConnectionBlock>();
        List<ModelConnectionBlock> performer_connections = new ArrayList<ModelConnectionBlock>();
        performer_connections.addAll(performer.getConnectionBlocks());
        if (performer.isDefaultConnectionsEnabled()) {
            // Add modulation depth range (RPN 5) to the modulation wheel (cc#1)
            boolean isModulationWheelConectionFound = false;
            for (int j = 0; j < performer_connections.size(); j++) {
                ModelConnectionBlock connection = performer_connections.get(j);
                ModelSource[] sources = connection.getSources();
                ModelDestination dest = connection.getDestination();
                boolean isModulationWheelConection = false;
                if (dest != null && sources != null && sources.length > 1) {
                    for (int i = 0; i < sources.length; i++) {
                        // check if connection block has the source "modulation
                        // wheel cc#1"
                        if (sources[i].getIdentifier().getObject().equals(
                                "midi_cc")) {
                            if (sources[i].getIdentifier().getVariable()
                                    .equals("1")) {
                                isModulationWheelConection = true;
                                isModulationWheelConectionFound = true;
                                break;
                            }
                        }
                    }
                }
                if (isModulationWheelConection) {
                    // Replace the block with a copy that also multiplies in
                    // RPN 5 (modulation depth range), rescaled accordingly.
                    ModelConnectionBlock newconnection = new ModelConnectionBlock();
                    newconnection.setSources(connection.getSources());
                    newconnection.setDestination(connection.getDestination());
                    newconnection.addSource(new ModelSource(
                            new ModelIdentifier("midi_rpn", "5")));
                    newconnection.setScale(connection.getScale() * 256.0);
                    performer_connections.set(j, newconnection);
                }
            }
            if (!isModulationWheelConectionFound) {
                // No cc#1 block at all: add a default LFO1 * cc#1 * RPN5
                // vibrato connection to pitch.
                ModelConnectionBlock conn = new ModelConnectionBlock(
                        new ModelSource(ModelSource.SOURCE_LFO1,
                            ModelStandardTransform.DIRECTION_MIN2MAX,
                            ModelStandardTransform.POLARITY_BIPOLAR,
                            ModelStandardTransform.TRANSFORM_LINEAR),
                        new ModelSource(new ModelIdentifier("midi_cc", "1", 0),
                            ModelStandardTransform.DIRECTION_MIN2MAX,
                            ModelStandardTransform.POLARITY_UNIPOLAR,
                            ModelStandardTransform.TRANSFORM_LINEAR),
                        50,
                        new ModelDestination(ModelDestination.DESTINATION_PITCH));
                conn.addSource(new ModelSource(new ModelIdentifier("midi_rpn",
                        "5")));
                conn.setScale(conn.getScale() * 256.0);
                performer_connections.add(conn);
            }
            // Let aftertouch behave just like the modulation wheel (cc#1)
            boolean channel_pressure_set = false;
            boolean poly_pressure = false;
            ModelConnectionBlock mod_cc_1_connection = null;
            int mod_cc_1_connection_src_ix = 0;
            for (ModelConnectionBlock connection : performer_connections) {
                ModelSource[] sources = connection.getSources();
                ModelDestination dest = connection.getDestination();
                if (dest != null && sources != null) {
                    for (int i = 0; i < sources.length; i++) {
                        ModelIdentifier srcid = sources[i].getIdentifier();
                        // check if connection block has the source "modulation
                        // wheel cc#1"
                        if (srcid.getObject().equals("midi_cc")) {
                            if (srcid.getVariable().equals("1")) {
                                mod_cc_1_connection = connection;
                                mod_cc_1_connection_src_ix = i;
                            }
                        }
                        // check if channel or poly pressure are already
                        // connected
                        if (srcid.getObject().equals("midi")) {
                            if (srcid.getVariable().equals("channel_pressure"))
                                channel_pressure_set = true;
                            if (srcid.getVariable().equals("poly_pressure"))
                                poly_pressure = true;
                        }
                    }
                }
            }
            if (mod_cc_1_connection != null) {
                // Clone the cc#1 block, substituting the pressure source for
                // the cc#1 source, for whichever pressure kinds are unbound.
                if (!channel_pressure_set) {
                    ModelConnectionBlock mc = new ModelConnectionBlock();
                    mc.setDestination(mod_cc_1_connection.getDestination());
                    mc.setScale(mod_cc_1_connection.getScale());
                    ModelSource[] src_list = mod_cc_1_connection.getSources();
                    ModelSource[] src_list_new = new ModelSource[src_list.length];
                    for (int i = 0; i < src_list_new.length; i++)
                        src_list_new[i] = src_list[i];
                    src_list_new[mod_cc_1_connection_src_ix] = new ModelSource(
                            new ModelIdentifier("midi", "channel_pressure"));
                    mc.setSources(src_list_new);
                    connmap.put(extractKeys(mc), mc);
                }
                if (!poly_pressure) {
                    ModelConnectionBlock mc = new ModelConnectionBlock();
                    mc.setDestination(mod_cc_1_connection.getDestination());
                    mc.setScale(mod_cc_1_connection.getScale());
                    ModelSource[] src_list = mod_cc_1_connection.getSources();
                    ModelSource[] src_list_new = new ModelSource[src_list.length];
                    for (int i = 0; i < src_list_new.length; i++)
                        src_list_new[i] = src_list[i];
                    src_list_new[mod_cc_1_connection_src_ix] = new ModelSource(
                            new ModelIdentifier("midi", "poly_pressure"));
                    mc.setSources(src_list_new);
                    connmap.put(extractKeys(mc), mc);
                }
            }
            // Enable Vibration Sound Controllers : 76, 77, 78
            // First locate the existing LFO -> pitch connection (if any) whose
            // LFO instance the vibrato controllers should target.
            ModelConnectionBlock found_vib_connection = null;
            for (ModelConnectionBlock connection : performer_connections) {
                ModelSource[] sources = connection.getSources();
                if (sources.length != 0
                        && sources[0].getIdentifier().getObject().equals("lfo")) {
                    if (connection.getDestination().getIdentifier().equals(
                            ModelDestination.DESTINATION_PITCH)) {
                        // NOTE(review): the tie-breaking below prefers a block
                        // with fewer sources, else a lower LFO instance — the
                        // exact intent is not obvious from this file alone.
                        if (found_vib_connection == null)
                            found_vib_connection = connection;
                        else {
                            if (found_vib_connection.getSources().length > sources.length)
                                found_vib_connection = connection;
                            else if (found_vib_connection.getSources()[0]
                                    .getIdentifier().getInstance() < 1) {
                                if (found_vib_connection.getSources()[0]
                                        .getIdentifier().getInstance() >
                                        sources[0].getIdentifier().getInstance()) {
                                    found_vib_connection = connection;
                                }
                            }
                        }
                    }
                }
            }
            int instance = 1;
            if (found_vib_connection != null) {
                instance = found_vib_connection.getSources()[0].getIdentifier()
                        .getInstance();
            }
            ModelConnectionBlock connection;
            // cc#78 -> vibrato delay of the chosen LFO instance.
            connection = new ModelConnectionBlock(
                new ModelSource(new ModelIdentifier("midi_cc", "78"),
                    ModelStandardTransform.DIRECTION_MIN2MAX,
                    ModelStandardTransform.POLARITY_BIPOLAR,
                    ModelStandardTransform.TRANSFORM_LINEAR),
                2000, new ModelDestination(
                    new ModelIdentifier("lfo", "delay2", instance)));
            connmap.put(extractKeys(connection), connection);
            // cc#77 -> vibrato depth; the custom transform maps the controller
            // to +/-600 and clamps against the existing vibrato scale.
            final double scale = found_vib_connection == null ? 0
                    : found_vib_connection.getScale();
            connection = new ModelConnectionBlock(
                new ModelSource(new ModelIdentifier("lfo", instance)),
                new ModelSource(new ModelIdentifier("midi_cc", "77"),
                    new ModelTransform() {
                        double s = scale;
                        public double transform(double value) {
                            value = value * 2 - 1;
                            value *= 600;
                            if (s == 0) {
                                return value;
                            } else if (s > 0) {
                                if (value < -s)
                                    value = -s;
                                return value;
                            } else {
                                if (value < s)
                                    value = -s;
                                return -value;
                            }
                        }
                    }), new ModelDestination(ModelDestination.DESTINATION_PITCH));
            connmap.put(extractKeys(connection), connection);
            // cc#76 -> vibrato rate of the chosen LFO instance.
            connection = new ModelConnectionBlock(
                new ModelSource(new ModelIdentifier("midi_cc", "76"),
                    ModelStandardTransform.DIRECTION_MIN2MAX,
                    ModelStandardTransform.POLARITY_BIPOLAR,
                    ModelStandardTransform.TRANSFORM_LINEAR),
                2400, new ModelDestination(
                    new ModelIdentifier("lfo", "freq", instance)));
            connmap.put(extractKeys(connection), connection);
        }
        // Add default connection blocks
        if (performer.isDefaultConnectionsEnabled())
            for (ModelConnectionBlock connection : defaultconnections)
                connmap.put(extractKeys(connection), connection);
        // Add connection blocks from modelperformer (these win over defaults
        // because they are put into connmap last)
        for (ModelConnectionBlock connection : performer_connections)
            connmap.put(extractKeys(connection), connection);
        // separate connection blocks : Init time, Midi Time, Midi/Control Time,
        // Control Time
        List<ModelConnectionBlock> connections = new ArrayList<ModelConnectionBlock>();
        midi_ctrl_connections = new int[128][];
        for (int i = 0; i < midi_ctrl_connections.length; i++) {
            midi_ctrl_connections[i] = null;
        }
        midi_connections = new int[5][];
        for (int i = 0; i < midi_connections.length; i++) {
            midi_connections[i] = null;
        }
        int ix = 0;
        boolean mustBeOnTop = false;
        // Blocks writing to "noteon" must run first so that forced key number /
        // velocity are in place before anything reads them.
        for (ModelConnectionBlock connection : connmap.values()) {
            if (connection.getDestination() != null) {
                ModelDestination dest = connection.getDestination();
                ModelIdentifier id = dest.getIdentifier();
                if (id.getObject().equals("noteon")) {
                    mustBeOnTop = true;
                    if (id.getVariable().equals("keynumber"))
                        forcedKeynumber = true;
                    if (id.getVariable().equals("velocity"))
                        forcedVelocity = true;
                }
            }
            if (mustBeOnTop) {
                connections.add(0, connection);
                mustBeOnTop = false;
            } else
                connections.add(connection);
        }
        // Index every source of every block by its kind (cc / rpn / nrpn / midi /
        // noteon / other) for fast event dispatch.
        for (ModelConnectionBlock connection : connections) {
            if (connection.getSources() != null) {
                ModelSource[] srcs = connection.getSources();
                for (int i = 0; i < srcs.length; i++) {
                    processSource(srcs[i], ix);
                }
            }
            ix++;
        }
        this.connections = new ModelConnectionBlock[connections.size()];
        connections.toArray(this.connections);
        this.ctrl_connections = new int[ctrl_connections_list.size()];
        for (int i = 0; i < this.ctrl_connections.length; i++)
            this.ctrl_connections[i] = ctrl_connections_list.get(i);
        oscillators = new ModelOscillator[performer.getOscillators().size()];
        performer.getOscillators().toArray(oscillators);
        // Strip transforms that are no-ops so the render loop can skip them.
        for (ModelConnectionBlock conn : connections) {
            if (conn.getDestination() != null) {
                if (isUnnecessaryTransform(conn.getDestination().getTransform())) {
                    conn.getDestination().setTransform(null);
                }
            }
            if (conn.getSources() != null) {
                for (ModelSource src : conn.getSources()) {
                    if (isUnnecessaryTransform(src.getTransform())) {
                        src.setTransform(null);
                    }
                }
            }
        }
    }
private static boolean isUnnecessaryTransform(ModelTransform transform) {
if (transform == null)
return false;
if (!(transform instanceof ModelStandardTransform))
return false;
ModelStandardTransform stransform = (ModelStandardTransform)transform;
if (stransform.getDirection() != ModelStandardTransform.DIRECTION_MIN2MAX)
return false;
if (stransform.getPolarity() != ModelStandardTransform.POLARITY_UNIPOLAR)
return false;
if (stransform.getTransform() != ModelStandardTransform.TRANSFORM_LINEAR)
return false;
return false;
}
}
|
googleapis/google-cloud-java | 35,211 | java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/ListApisResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Response message for ListApis.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListApisResponse}
*/
public final class ListApisResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListApisResponse)
ListApisResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListApisResponse.newBuilder() to construct.
private ListApisResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListApisResponse() {
apis_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListApisResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApisResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListApisResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.ListApisResponse.class,
com.google.cloud.apigeeregistry.v1.ListApisResponse.Builder.class);
}
public static final int APIS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.apigeeregistry.v1.Api> apis_;
/**
*
*
* <pre>
* The APIs from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.apigeeregistry.v1.Api> getApisList() {
return apis_;
}
/**
*
*
* <pre>
* The APIs from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.apigeeregistry.v1.ApiOrBuilder>
getApisOrBuilderList() {
return apis_;
}
/**
*
*
* <pre>
* The APIs from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
*/
@java.lang.Override
public int getApisCount() {
return apis_.size();
}
/**
*
*
* <pre>
* The APIs from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.Api getApis(int index) {
return apis_.get(index);
}
/**
*
*
* <pre>
* The APIs from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ApiOrBuilder getApisOrBuilder(int index) {
return apis_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < apis_.size(); i++) {
output.writeMessage(1, apis_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < apis_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, apis_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListApisResponse)) {
return super.equals(obj);
}
com.google.cloud.apigeeregistry.v1.ListApisResponse other =
(com.google.cloud.apigeeregistry.v1.ListApisResponse) obj;
if (!getApisList().equals(other.getApisList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getApisCount() > 0) {
hash = (37 * hash) + APIS_FIELD_NUMBER;
hash = (53 * hash) + getApisList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
  // Protoc-generated static parse entry points for ListApisResponse. Every
  // overload delegates to the shared PARSER; byte-based variants throw
  // InvalidProtocolBufferException on malformed input, stream-based variants
  // additionally surface IOException from the underlying stream.
  // --- ByteString input (optionally with an extension registry) ---
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // --- raw byte[] input ---
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // --- InputStream input: consumes the stream to EOF as one message ---
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- length-delimited InputStream input (varint size prefix per message) ---
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- CodedInputStream input (caller already wrapped the protobuf reader) ---
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: newBuilder() starts from the default instance,
  // newBuilder(prototype) pre-populates the builder from an existing message,
  // and toBuilder() skips the self-merge when invoked on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.apigeeregistry.v1.ListApisResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for ListApis.
   * </pre>
   *
   * Protobuf type {@code google.cloud.apigeeregistry.v1.ListApisResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListApisResponse)
      com.google.cloud.apigeeregistry.v1.ListApisResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApisResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApisResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.apigeeregistry.v1.ListApisResponse.class,
              com.google.cloud.apigeeregistry.v1.ListApisResponse.Builder.class);
    }
    // Construct using com.google.cloud.apigeeregistry.v1.ListApisResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default; detaches/clears the nested apis
    // builder if one was created.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (apisBuilder_ == null) {
        apis_ = java.util.Collections.emptyList();
      } else {
        apis_ = null;
        apisBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_ListApisResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApisResponse getDefaultInstanceForType() {
      return com.google.cloud.apigeeregistry.v1.ListApisResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApisResponse build() {
      com.google.cloud.apigeeregistry.v1.ListApisResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Assembles the message without the initialization check: repeated fields
    // are frozen first, then scalar fields gated by bitField0_ are copied.
    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.ListApisResponse buildPartial() {
      com.google.cloud.apigeeregistry.v1.ListApisResponse result =
          new com.google.cloud.apigeeregistry.v1.ListApisResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.apigeeregistry.v1.ListApisResponse result) {
      if (apisBuilder_ == null) {
        // Freeze the plain list (if we own a mutable copy) before handing it
        // to the immutable message.
        if (((bitField0_ & 0x00000001) != 0)) {
          apis_ = java.util.Collections.unmodifiableList(apis_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.apis_ = apis_;
      } else {
        result.apis_ = apisBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.apigeeregistry.v1.ListApisResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.apigeeregistry.v1.ListApisResponse) {
        return mergeFrom((com.google.cloud.apigeeregistry.v1.ListApisResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge from another message: apis lists are concatenated
    // (with an aliasing fast path when ours is empty); a non-empty
    // nextPageToken in `other` overwrites ours.
    public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.ListApisResponse other) {
      if (other == com.google.cloud.apigeeregistry.v1.ListApisResponse.getDefaultInstance())
        return this;
      if (apisBuilder_ == null) {
        if (!other.apis_.isEmpty()) {
          if (apis_.isEmpty()) {
            apis_ = other.apis_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureApisIsMutable();
            apis_.addAll(other.apis_);
          }
          onChanged();
        }
      } else {
        if (!other.apis_.isEmpty()) {
          if (apisBuilder_.isEmpty()) {
            // The field builder is empty, so drop it and alias other's
            // immutable list directly; re-create the builder only when the
            // runtime is configured to always use field builders.
            apisBuilder_.dispose();
            apisBuilder_ = null;
            apis_ = other.apis_;
            bitField0_ = (bitField0_ & ~0x00000001);
            apisBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getApisFieldBuilder()
                    : null;
          } else {
            apisBuilder_.addAllMessages(other.apis_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 (field 1, length-delimited) appends an
    // Api message; tag 18 (field 2) reads next_page_token; anything else is
    // preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.apigeeregistry.v1.Api m =
                    input.readMessage(
                        com.google.cloud.apigeeregistry.v1.Api.parser(), extensionRegistry);
                if (apisBuilder_ == null) {
                  ensureApisIsMutable();
                  apis_.add(m);
                } else {
                  apisBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: apis_ is a privately-owned mutable copy.
    // Bit 0x00000002: nextPageToken_ was explicitly set.
    private int bitField0_;
    private java.util.List<com.google.cloud.apigeeregistry.v1.Api> apis_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable apis_ list with a private
    // ArrayList before the first mutation.
    private void ensureApisIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        apis_ = new java.util.ArrayList<com.google.cloud.apigeeregistry.v1.Api>(apis_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.apigeeregistry.v1.Api,
            com.google.cloud.apigeeregistry.v1.Api.Builder,
            com.google.cloud.apigeeregistry.v1.ApiOrBuilder>
        apisBuilder_;
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public java.util.List<com.google.cloud.apigeeregistry.v1.Api> getApisList() {
      if (apisBuilder_ == null) {
        return java.util.Collections.unmodifiableList(apis_);
      } else {
        return apisBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public int getApisCount() {
      if (apisBuilder_ == null) {
        return apis_.size();
      } else {
        return apisBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public com.google.cloud.apigeeregistry.v1.Api getApis(int index) {
      if (apisBuilder_ == null) {
        return apis_.get(index);
      } else {
        return apisBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder setApis(int index, com.google.cloud.apigeeregistry.v1.Api value) {
      if (apisBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureApisIsMutable();
        apis_.set(index, value);
        onChanged();
      } else {
        apisBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder setApis(
        int index, com.google.cloud.apigeeregistry.v1.Api.Builder builderForValue) {
      if (apisBuilder_ == null) {
        ensureApisIsMutable();
        apis_.set(index, builderForValue.build());
        onChanged();
      } else {
        apisBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder addApis(com.google.cloud.apigeeregistry.v1.Api value) {
      if (apisBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureApisIsMutable();
        apis_.add(value);
        onChanged();
      } else {
        apisBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder addApis(int index, com.google.cloud.apigeeregistry.v1.Api value) {
      if (apisBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureApisIsMutable();
        apis_.add(index, value);
        onChanged();
      } else {
        apisBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder addApis(com.google.cloud.apigeeregistry.v1.Api.Builder builderForValue) {
      if (apisBuilder_ == null) {
        ensureApisIsMutable();
        apis_.add(builderForValue.build());
        onChanged();
      } else {
        apisBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder addApis(
        int index, com.google.cloud.apigeeregistry.v1.Api.Builder builderForValue) {
      if (apisBuilder_ == null) {
        ensureApisIsMutable();
        apis_.add(index, builderForValue.build());
        onChanged();
      } else {
        apisBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder addAllApis(
        java.lang.Iterable<? extends com.google.cloud.apigeeregistry.v1.Api> values) {
      if (apisBuilder_ == null) {
        ensureApisIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, apis_);
        onChanged();
      } else {
        apisBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder clearApis() {
      if (apisBuilder_ == null) {
        apis_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        apisBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public Builder removeApis(int index) {
      if (apisBuilder_ == null) {
        ensureApisIsMutable();
        apis_.remove(index);
        onChanged();
      } else {
        apisBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public com.google.cloud.apigeeregistry.v1.Api.Builder getApisBuilder(int index) {
      return getApisFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public com.google.cloud.apigeeregistry.v1.ApiOrBuilder getApisOrBuilder(int index) {
      if (apisBuilder_ == null) {
        return apis_.get(index);
      } else {
        return apisBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.apigeeregistry.v1.ApiOrBuilder>
        getApisOrBuilderList() {
      if (apisBuilder_ != null) {
        return apisBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(apis_);
      }
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public com.google.cloud.apigeeregistry.v1.Api.Builder addApisBuilder() {
      return getApisFieldBuilder()
          .addBuilder(com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public com.google.cloud.apigeeregistry.v1.Api.Builder addApisBuilder(int index) {
      return getApisFieldBuilder()
          .addBuilder(index, com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The APIs from the specified publisher.
     * </pre>
     *
     * <code>repeated .google.cloud.apigeeregistry.v1.Api apis = 1;</code>
     */
    public java.util.List<com.google.cloud.apigeeregistry.v1.Api.Builder> getApisBuilderList() {
      return getApisFieldBuilder().getBuilderList();
    }
    // Lazily switches from plain-list storage to a RepeatedFieldBuilderV3 on
    // first builder-style access; ownership of apis_ transfers to the field
    // builder (apis_ is nulled afterwards).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.apigeeregistry.v1.Api,
            com.google.cloud.apigeeregistry.v1.Api.Builder,
            com.google.cloud.apigeeregistry.v1.ApiOrBuilder>
        getApisFieldBuilder() {
      if (apisBuilder_ == null) {
        apisBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.apigeeregistry.v1.Api,
                com.google.cloud.apigeeregistry.v1.Api.Builder,
                com.google.cloud.apigeeregistry.v1.ApiOrBuilder>(
                apis_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        apis_ = null;
      }
      return apisBuilder_;
    }
    // Holds either a String or a lazily-decoded ByteString (protobuf's dual
    // representation for string fields).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListApisResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListApisResponse)
  // Shared immutable singleton representing the all-defaults message.
  private static final com.google.cloud.apigeeregistry.v1.ListApisResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListApisResponse();
  }
  public static com.google.cloud.apigeeregistry.v1.ListApisResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser instance backing all static parseFrom overloads. On failure the
  // partially-built message is attached to the thrown
  // InvalidProtocolBufferException via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ListApisResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListApisResponse>() {
        @java.lang.Override
        public ListApisResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListApisResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListApisResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.ListApisResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// source: java-automl/proto-google-cloud-automl-v1/src/main/java/com/google/cloud/automl/v1/ListDatasetsResponse.java (googleapis/google-cloud-java)
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.automl.v1;
/**
*
*
* <pre>
* Response message for [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1.ListDatasetsResponse}
*/
public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.automl.v1.ListDatasetsResponse)
ListDatasetsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDatasetsResponse.newBuilder() to construct.
  private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: both fields start at their proto3 defaults.
  private ListDatasetsResponse() {
    datasets_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDatasetsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.automl.v1.AutoMlProto
        .internal_static_google_cloud_automl_v1_ListDatasetsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.automl.v1.AutoMlProto
        .internal_static_google_cloud_automl_v1_ListDatasetsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.automl.v1.ListDatasetsResponse.class,
            com.google.cloud.automl.v1.ListDatasetsResponse.Builder.class);
  }
  public static final int DATASETS_FIELD_NUMBER = 1;
  // Immutable once built; populated only by the Builder.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.automl.v1.Dataset> datasets_;
  // Read accessors for the repeated `datasets` field (field number 1). The
  // backing list is frozen by the Builder, so it is returned without copying.
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.automl.v1.Dataset> getDatasetsList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.automl.v1.DatasetOrBuilder>
      getDatasetsOrBuilderList() {
    return datasets_;
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public int getDatasetsCount() {
    return datasets_.size();
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.automl.v1.Dataset getDatasets(int index) {
    return datasets_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The datasets read.
   * </pre>
   *
   * <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.automl.v1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
    return datasets_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Dual representation: holds either a String or a ByteString; the first
  // accessor call converts and caches the other form (volatile makes the
  // cached value safely visible across threads).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   * Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token to retrieve next page of results.
   * Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized. Trivially true
  // here since this message has no required fields.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in field-number order; empty next_page_token is omitted
  // (proto3 default-value elision).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < datasets_.size(); i++) {
      output.writeMessage(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte length,
  // mirroring writeTo's field-elision rules exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < datasets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields, per the protobuf
  // message equals contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1.ListDatasetsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1.ListDatasetsResponse other =
        (com.google.cloud.automl.v1.ListDatasetsResponse) obj;
    if (!getDatasetsList().equals(other.getDatasetsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals; folds in the descriptor, each
  // present field (tagged by field number), and unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDatasetsCount() > 0) {
      hash = (37 * hash) + DATASETS_FIELD_NUMBER;
      hash = (53 * hash) + getDatasetsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Protoc-generated static parse entry points for ListDatasetsResponse. All
  // overloads delegate to PARSER; byte-based variants throw
  // InvalidProtocolBufferException on malformed input, stream-based variants
  // additionally surface IOException from the underlying stream.
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants: read the stream to EOF as a single message.
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants: expect a varint length prefix before the message.
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.automl.v1.ListDatasetsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: newBuilder() starts from the default instance,
  // newBuilder(prototype) pre-populates from an existing message, and
  // toBuilder() skips the self-merge when invoked on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.automl.v1.ListDatasetsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets].
* </pre>
*
* Protobuf type {@code google.cloud.automl.v1.ListDatasetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.automl.v1.ListDatasetsResponse)
com.google.cloud.automl.v1.ListDatasetsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.automl.v1.AutoMlProto
          .internal_static_google_cloud_automl_v1_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.automl.v1.AutoMlProto
          .internal_static_google_cloud_automl_v1_ListDatasetsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.automl.v1.ListDatasetsResponse.class,
              com.google.cloud.automl.v1.ListDatasetsResponse.Builder.class);
    }
    // Construct using com.google.cloud.automl.v1.ListDatasetsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default; detaches/clears the nested datasets
    // builder if one was created.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (datasetsBuilder_ == null) {
        datasets_ = java.util.Collections.emptyList();
      } else {
        datasets_ = null;
        datasetsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.automl.v1.AutoMlProto
          .internal_static_google_cloud_automl_v1_ListDatasetsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.automl.v1.ListDatasetsResponse getDefaultInstanceForType() {
      return com.google.cloud.automl.v1.ListDatasetsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.automl.v1.ListDatasetsResponse build() {
      com.google.cloud.automl.v1.ListDatasetsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Assembles the message without the initialization check: repeated fields
    // are frozen first, then scalar fields gated by bitField0_ are copied.
    @java.lang.Override
    public com.google.cloud.automl.v1.ListDatasetsResponse buildPartial() {
      com.google.cloud.automl.v1.ListDatasetsResponse result =
          new com.google.cloud.automl.v1.ListDatasetsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.automl.v1.ListDatasetsResponse result) {
      if (datasetsBuilder_ == null) {
        // Freeze the plain list (if we own a mutable copy) before handing it
        // to the immutable message.
        if (((bitField0_ & 0x00000001) != 0)) {
          datasets_ = java.util.Collections.unmodifiableList(datasets_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.datasets_ = datasets_;
      } else {
        result.datasets_ = datasetsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.automl.v1.ListDatasetsResponse result) {
      int from_bitField0_ = bitField0_;
      // Bit 0x2 marks nextPageToken_ as explicitly set.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.automl.v1.ListDatasetsResponse) {
return mergeFrom((com.google.cloud.automl.v1.ListDatasetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.automl.v1.ListDatasetsResponse other) {
if (other == com.google.cloud.automl.v1.ListDatasetsResponse.getDefaultInstance())
return this;
if (datasetsBuilder_ == null) {
if (!other.datasets_.isEmpty()) {
if (datasets_.isEmpty()) {
datasets_ = other.datasets_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDatasetsIsMutable();
datasets_.addAll(other.datasets_);
}
onChanged();
}
} else {
if (!other.datasets_.isEmpty()) {
if (datasetsBuilder_.isEmpty()) {
datasetsBuilder_.dispose();
datasetsBuilder_ = null;
datasets_ = other.datasets_;
bitField0_ = (bitField0_ & ~0x00000001);
datasetsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDatasetsFieldBuilder()
: null;
} else {
datasetsBuilder_.addAllMessages(other.datasets_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.automl.v1.Dataset m =
input.readMessage(
com.google.cloud.automl.v1.Dataset.parser(), extensionRegistry);
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(m);
} else {
datasetsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.automl.v1.Dataset> datasets_ =
java.util.Collections.emptyList();
private void ensureDatasetsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
datasets_ = new java.util.ArrayList<com.google.cloud.automl.v1.Dataset>(datasets_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.automl.v1.Dataset,
com.google.cloud.automl.v1.Dataset.Builder,
com.google.cloud.automl.v1.DatasetOrBuilder>
datasetsBuilder_;
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.cloud.automl.v1.Dataset> getDatasetsList() {
if (datasetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(datasets_);
} else {
return datasetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public int getDatasetsCount() {
if (datasetsBuilder_ == null) {
return datasets_.size();
} else {
return datasetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public com.google.cloud.automl.v1.Dataset getDatasets(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder setDatasets(int index, com.google.cloud.automl.v1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.set(index, value);
onChanged();
} else {
datasetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder setDatasets(
int index, com.google.cloud.automl.v1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.set(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(com.google.cloud.automl.v1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(value);
onChanged();
} else {
datasetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(int index, com.google.cloud.automl.v1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(index, value);
onChanged();
} else {
datasetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(com.google.cloud.automl.v1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(
int index, com.google.cloud.automl.v1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder addAllDatasets(
java.lang.Iterable<? extends com.google.cloud.automl.v1.Dataset> values) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_);
onChanged();
} else {
datasetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder clearDatasets() {
if (datasetsBuilder_ == null) {
datasets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
datasetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public Builder removeDatasets(int index) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.remove(index);
onChanged();
} else {
datasetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public com.google.cloud.automl.v1.Dataset.Builder getDatasetsBuilder(int index) {
return getDatasetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public com.google.cloud.automl.v1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public java.util.List<? extends com.google.cloud.automl.v1.DatasetOrBuilder>
getDatasetsOrBuilderList() {
if (datasetsBuilder_ != null) {
return datasetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(datasets_);
}
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public com.google.cloud.automl.v1.Dataset.Builder addDatasetsBuilder() {
return getDatasetsFieldBuilder()
.addBuilder(com.google.cloud.automl.v1.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public com.google.cloud.automl.v1.Dataset.Builder addDatasetsBuilder(int index) {
return getDatasetsFieldBuilder()
.addBuilder(index, com.google.cloud.automl.v1.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* The datasets read.
* </pre>
*
* <code>repeated .google.cloud.automl.v1.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.cloud.automl.v1.Dataset.Builder> getDatasetsBuilderList() {
return getDatasetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.automl.v1.Dataset,
com.google.cloud.automl.v1.Dataset.Builder,
com.google.cloud.automl.v1.DatasetOrBuilder>
getDatasetsFieldBuilder() {
if (datasetsBuilder_ == null) {
datasetsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.automl.v1.Dataset,
com.google.cloud.automl.v1.Dataset.Builder,
com.google.cloud.automl.v1.DatasetOrBuilder>(
datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
datasets_ = null;
}
return datasetsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.automl.v1.ListDatasetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsResponse)
  // Singleton empty instance, created eagerly when the class is loaded.
  private static final com.google.cloud.automl.v1.ListDatasetsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.automl.v1.ListDatasetsResponse();
  }
  /** Returns the shared immutable default (empty) instance of this message. */
  public static com.google.cloud.automl.v1.ListDatasetsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and normalizes every
  // failure into InvalidProtocolBufferException carrying the partial message.
  private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDatasetsResponse>() {
        @java.lang.Override
        public ListDatasetsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far before rethrowing.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListDatasetsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() {
    return PARSER;
  }
  /** Returns the default (empty) instance; instance-method form of {@link #getDefaultInstance()}. */
  @java.lang.Override
  public com.google.cloud.automl.v1.ListDatasetsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/sis | 35,617 | endorsed/src/org.apache.sis.metadata/main/org/apache/sis/xml/XML.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.xml;
import java.time.ZoneId;
import java.util.Map;
import java.util.Locale;
import java.util.TimeZone;
import java.util.logging.Filter;
import java.util.logging.LogRecord; // For javadoc
import java.net.URISyntaxException;
import java.net.URL;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.BufferedInputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import javax.xml.transform.Source;
import javax.xml.transform.Result;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stax.StAXResult;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import jakarta.xml.bind.Marshaller;
import jakarta.xml.bind.Unmarshaller;
import jakarta.xml.bind.JAXBElement;
import jakarta.xml.bind.JAXBException;
import org.apache.sis.util.Version;
import org.apache.sis.util.Workaround;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.system.Modules;
import org.apache.sis.system.SystemListener;
import org.apache.sis.xml.internal.shared.URISource;
import org.apache.sis.xml.bind.TypeRegistration;
import static org.apache.sis.util.ArgumentChecks.ensureNonNull;
/**
* Provides convenience methods for marshalling and unmarshalling SIS objects.
* Marshalling operations use the standard versions listed below
* (for marshalling a document in a different version, see {@link MarshallerPool}).
* Unmarshalling detects the version automatically.
*
* <table class="sis">
* <caption>Versions of standards applied at marshalling time</caption>
* <tr><th>Topic</th> <th>SIS 0.3 to 0.8</th> <th>SIS 1.0</th> <th>Remarks</th></tr>
* <tr><td>Metadata</td> <td>ISO 19139:2007</td> <td>ISO 19115-3:2016</td> <td></td></tr>
* <tr><td>Referencing</td> <td>ISO 19136:2007</td> <td>ISO 19136:2007</td> <td>Same as GML 3.2</td></tr>
* </table>
*
* This class defines also some property keys that can be given to the {@link Marshaller}
* and {@link Unmarshaller} instances created by {@link MarshallerPool}:
*
* <table class="sis">
* <caption>Supported (un)marshaller properties</caption>
* <tr><th>Key</th> <th>Value type</th> <th>Purpose</th></tr>
* <tr><td>{@link #LOCALE}</td> <td>{@link Locale}</td> <td>for specifying the locale to use for international strings and code lists.</td></tr>
* <tr><td>{@link #TIMEZONE}</td> <td>{@link TimeZone}</td> <td>for specifying the timezone to use for dates and times.</td></tr>
* <tr><td>{@link #SCHEMAS}</td> <td>{@link Map}</td> <td>for specifying the root URL of metadata schemas to use.</td></tr>
* <tr><td>{@link #GML_VERSION}</td> <td>{@link Version}</td> <td>for specifying the GML version of the document to be (un)marshalled.</td></tr>
* <tr><td>{@link #METADATA_VERSION}</td> <td>{@link Version}</td> <td>for specifying the metadata version of the document to be (un)marshalled.</td></tr>
* <tr><td>{@link #RESOLVER}</td> <td>{@link ReferenceResolver}</td> <td>for replacing {@code xlink} or {@code uuidref} attributes by the actual object to use.</td></tr>
* <tr><td>{@link #CONVERTER}</td> <td>{@link ValueConverter}</td> <td>for controlling the conversion of URL, UUID, Units or similar objects.</td></tr>
* <tr><td>{@link #STRING_SUBSTITUTES}</td> <td>{@code String[]}</td> <td>for specifying which code lists to replace by simpler {@code <gco:CharacterString>} elements.</td></tr>
* <tr><td>{@link #WARNING_FILTER}</td> <td>{@link Filter}</td> <td>for being notified about non-fatal warnings.</td></tr>
* </table>
*
* @author Cédric Briançon (Geomatys)
* @author Martin Desruisseaux (Geomatys)
* @author Cullen Rombach (Image Matters)
* @version 1.5
* @since 0.3
*/
public final class XML {
/**
* Specifies the locale to use for marshalling
* {@link org.opengis.util.InternationalString} and {@link org.opengis.util.CodeList}
* instances. The value for this property shall be an instance of {@link Locale} or a
* {@link CharSequence} recognized by {@link org.apache.sis.util.Locales#parse(String)}.
*
* <p>This property is mostly for marshallers. However, this property can also be used at
* unmarshalling time, for example if a {@code <lan:PT_FreeText>} element containing
* many localized strings need to be represented in a Java {@link String} object. In
* such case, the unmarshaller will try to pickup a string in the language specified
* by this property.</p>
*
* <h4>Default behavior</h4>
* If this property is never set, then (un)marshalling will try to use "unlocalized" strings -
* typically some programmatic strings like {@linkplain org.opengis.annotation.UML#identifier()
* UML identifiers}. While such identifiers often look like English words, they are not
* considered as the {@linkplain Locale#ENGLISH English} localization.
* The algorithm attempting to find a "unlocalized" string is defined in the
* {@link org.apache.sis.util.DefaultInternationalString#toString(Locale)} javadoc.
*
* <h4>Special case</h4>
* If the object to be marshalled is an instance of
* {@link org.apache.sis.metadata.iso.DefaultMetadata}, then the value given to its
* {@link org.apache.sis.metadata.iso.DefaultMetadata#setLanguage(Locale) setLanguage(Locale)}
* method will have precedence over this property. This behavior is compliant with INSPIRE rules.
*
* @see org.apache.sis.setup.OptionKey#LOCALE
* @see Marshaller#setProperty(String, Object)
* @see org.apache.sis.metadata.iso.DefaultMetadata#setLanguage(Locale)
*/
public static final String LOCALE = "org.apache.sis.xml.locale";
/**
* Specifies the timezone to use for marshalling dates and times.
* The value for this property shall be an instance of {@link ZoneId}, {@link TimeZone}, or a
* {@link CharSequence} recognized by {@link ZoneId#of(String)} or {@link TimeZone#getTimeZone(String)}.
*
* <h4>Default behavior</h4>
* If this property is never set, then (un)marshalling will use the
* {@linkplain TimeZone#getDefault() default timezone}.
*
* @see org.apache.sis.setup.OptionKey#TIMEZONE
*/
public static final String TIMEZONE = "org.apache.sis.xml.timezone";
/**
* Specifies the root URLs of some schemas.
* This property modifies only the URL strings; it does not change the structure of
* marshalled XML documents (for content structure, see {@link #METADATA_VERSION}).
* The value for this property shall be an instance of {@link Map Map<String,String>}.
* This property controls the URLs to be used when marshalling the following elements:
*
* <ul>
* <li>The value of the {@code codeList} attribute when marshalling subclasses of
* {@link org.opengis.util.CodeList}.</li>
* <li>The value of the {@code uom} attribute when marshalling measures
* (for example {@code <gco:Distance>}).</li>
* </ul>
*
* <h4>Examples</h4>
 * URLs in code lists and in units of measurement may appear as below.
* The underlined fragment is the part that can be replaced by {@code SCHEMAS} values:
* <ul>
* <li><code><u>http://standards.iso.org/iso/19115/</u>resources/Codelist/cat/codelists.xml#LanguageCode</code></li>
* <li><code><u>http://www.isotc211.org/2005/</u>resources/Codelist/gmxCodelists.xml#LanguageCode</code></li>
* <li><code><u>http://www.isotc211.org/2005/</u>resources/uom/gmxUom.xml#xpointer(//*[@gml:id='m'])</code></li>
* </ul>
*
* <h4>Implementation note</h4>
* The currently recognized keys are listed below.
* The entries to be used depend on the {@linkplain #METADATA_VERSION metadata version} to be marshalled.
* For example, the {@code "cat"} entry is used when marshalling ISO 19115-3:2016 document, while the
* {@code "gmd"} and {@code "gmi"} entries are used when marshalling ISO 19139:2007 documents.
* The following table gives some typical URLs, with the default URL in bold characters:
*
* <table class="sis">
* <caption>Supported root URLs</caption>
* <tr>
* <th>Map key</th>
* <th>Typical values (choose only one)</th>
* </tr><tr>
* <td><b>cat</b></td>
* <td><b>http://standards.iso.org/iso/19115/</b></td>
* </tr><tr>
* <td class="hsep"><b>gmd</b></td>
* <td class="hsep">
* <b>http://www.isotc211.org/2005/</b><br>
* http://schemas.opengis.net/iso/19139/20070417/<br>
* http://standards.iso.org/ittf/PubliclyAvailableStandards/ISO_19139_Schemas/</td>
* </tr>
* </table>
*
* Additional keys, if any, are ignored. Future SIS versions may recognize more keys.
*/
public static final String SCHEMAS = "org.apache.sis.xml.schemas";
// If more keys are documented, update the Pooled.SCHEMAS_KEY array.
/**
* Specifies the GML version of the document to be marshalled or unmarshalled.
* The GML version may affect the set of XML elements to be marshalled and their namespaces.
* Note that GML 3.2 is identical to ISO 19136:2007.
*
* The value can be {@link String} or {@link Version} object.
* If no version is specified, then the most recent supported GML version is assumed.
*
* <h4>Supported GML versions</h4>
* Apache SIS currently supports GML 3.2.1 by default. SIS can read and write GML 3.2
* if this property is set to "3.2". It is also possible to set this property to "3.1",
* but the marshalled XML is not GML 3.1.1 conformant because of the differences between the two schemas.
* See <a href="http://issues.apache.org/jira/browse/SIS-160">SIS-160: Need XSLT between GML 3.1 and 3.2</a>
* for information about the status of GML 3.1.1 support.
*
* <h4>Compatibility note</h4>
* Newer GML versions typically have more elements, but not always. For example, in {@code <gml:VerticalDatum>},
* the {@code <gml:verticalDatumType>} property presents in GML 3.0 and 3.1 has been removed in GML 3.2.
*/
public static final String GML_VERSION = "org.apache.sis.gml.version";
/**
* Specifies the metadata version of the document to be marshalled or unmarshalled.
* The metadata version may affect the set of XML elements to be marshalled and their namespaces.
* The value can be {@link String} or {@link Version} object.
* If no version is specified, then the most recent supported metadata version is assumed.
*
* <p>The metadata version may be ignored when the metadata to marshal is inside a GML element.
* For example, the {@code <gml:domainOfValidity>} element inside a coordinate reference system
* is always marshalled using ISO 19139:2007 if the enclosing element uses GML 3.2 schema.</p>
*
* <h4>Supported metadata versions</h4>
* Apache SIS currently supports ISO 19115-3:2016 by default. This version can be explicitly
* set with value "2014" or above (because the abstract model was defined in ISO 19115-1:2014).
* SIS can write legacy ISO 19139:2007 documents if this property is set to a value less than "2014".
* Both versions can be read without the need to specify this property.
*
* @since 1.0
*/
public static final String METADATA_VERSION = "org.apache.sis.xml.version.metadata";
/**
* Specifies whether the unmarshalling process should accept any metadata or GML supported version
* if the user did not specify an explicit version. The value can be a {@link Boolean} instance,
* or {@code "true"} or {@code "false"} as a {@link String}. If this value is not specified, then
* the default is {@code true} for all {@code XML.unmarshal} methods and {@code false} otherwise.
*
* <p>Metadata and Geographic Markup Language have slightly different XML encoding depending on the
* OGC/ISO version in use. Often the namespaces are different, but not only. Internally, Apache SIS
* supports only the schema versions documented in this {@linkplain XML class javadoc}, for example
* the ISO 19115-3:2016 version of metadata schema. For unmarshalling a document encoded according
* an older metadata schema (e.g. ISO 19139:2007), a transformation is applied on-the-fly. However
* this transformation may sometimes produce undesirable results or make debugging more difficult.
* For this reason {@link MarshallerPool} applies the transformation only if explicitly requested,
* either by setting a {@link #METADATA_VERSION} or {@link #GML_VERSION} explicitly, or by setting
* this {@code LENIENT_UNMARSHAL} property to {@code true} if the version to unmarshal is not known
* in advance.</p>
*
* @since 1.0
*/
public static final String LENIENT_UNMARSHAL = "org.apache.sis.xml.lenient";
/**
* Allows client code to replace {@code xlink} or {@code uuidref} attributes by the actual objects to use.
* The value for this property shall be an instance of {@link ReferenceResolver}.
*
* <p>If a property in a XML document is defined only by {@code xlink} or {@code uuidref} attributes,
* without any concrete definition, then the default behavior is as below:</p>
*
* <ul>
* <li>If the reference is of the form {@code xlink:href="#foo"} and an object with the {@code gml:id="foo"}
* attribute was previously found in the same XML document, then that object will be used.</li>
* <li>Otherwise, if {@code xlink:href} references an external document, that document is unmarshalled.
* The URI resolution can be controlled with an {@link javax.xml.transform.URIResolver} specified
* at construction time.</li>
* <li>Otherwise, an empty element containing only the values of the above-cited attributes is created.</li>
* </ul>
*
* Applications can sometimes do better by using some domain-specific knowledge, for example by searching in a
* database. Users can define their search algorithm by subclassing {@link ReferenceResolver} and configuring
 * an unmarshaller as below:
*
* {@snippet lang="java" :
* ReferenceResolver myResolver = ...;
* Map<String,Object> properties = new HashMap<>();
* properties.put(XML.RESOLVER, myResolver);
* Object obj = XML.unmarshal(source, properties);
* }
*
* @see Unmarshaller#setProperty(String, Object)
* @see ReferenceResolver
*/
public static final String RESOLVER = "org.apache.sis.xml.resolver";
/**
 * Controls the behaviors of the (un)marshalling process when an element cannot be processed,
 * or alters the element values. The value for this property shall be an instance of {@link ValueConverter}.
 *
 * <p>If an element in an XML document cannot be parsed (for example if a {@linkplain java.net.URL}
 * string is not valid), the default behavior is to throw an exception which causes the
 * (un)marshalling of the entire document to fail. This default behavior can be customized by
 * invoking {@link Marshaller#setProperty(String, Object)} with this {@code CONVERTER} property
 * key and a custom {@link ValueConverter} instance. {@code ValueConverter} can also be used
 * for replacing an erroneous URL by a fixed URL. See the {@link ValueConverter} javadoc for
 * more details.</p>
 *
 * <h4>Example</h4>
 * The following example collects the failures in a list without stopping the (un)marshalling process.
 *
 * {@snippet lang="java" :
 *     class WarningCollector extends ValueConverter {
 *         // The warnings collected during (un)marshalling.
 *         List<String> messages = new ArrayList<String>();
 *
 *         // Override the default implementation in order to
 *         // collect the warnings and allow the process to continue.
 *         @Override
 *         protected <T> boolean exceptionOccured(MarshalContext context,
 *                 T value, Class<T> sourceType, Class<T> targetType, Exception e)
 *         {
 *             messages.add(e.getLocalizedMessage());
 *             return true;
 *         }
 *     }
 *
 *     // Unmarshal an XML string, trapping some kind of errors.
 *     // Not all errors are trapped - see the ValueConverter
 *     // javadoc for more details.
 *     WarningCollector myWarningList = new WarningCollector();
 *     Map<String,Object> properties = new HashMap<>();
 *     properties.put(XML.CONVERTER, myWarningList);
 *     Object obj = XML.unmarshal(source, properties);
 *     if (!myWarningList.isEmpty()) {
 *         // Report here the warnings to the user.
 *     }
 *     }
 *
 * @see Unmarshaller#setProperty(String, Object)
 * @see ValueConverter
 */
public static final String CONVERTER = "org.apache.sis.xml.converter";
/**
 * Allows marshallers to substitute some code lists by the simpler {@code <gco:CharacterString>} element.
 * The value for this property shall be a {@code String[]} array of any of the following values:
 *
 * <ul>
 *   <li>"{@code language}" for substituting {@code <lan:LanguageCode>} elements</li>
 *   <li>"{@code country}" for substituting {@code <lan:Country>} elements</li>
 *   <li>"{@code filename}" for substituting {@code <gcx:FileName>} elements</li>
 *   <li>"{@code mimetype}" for substituting {@code <gcx:MimeFileType>} elements</li>
 * </ul>
 *
 * <h4>Example</h4>
 * INSPIRE compliant language code shall be formatted like below (details may vary):
 *
 * {@snippet lang="xml" :
 *     <gmd:language>
 *       <gmd:LanguageCode
 *           codeList="http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml#LanguageCode"
 *           codeListValue="fra">French</gmd:LanguageCode>
 *     </gmd:language>
 *     }
 *
 * However if this property contains the "{@code language}" value, then the marshaller will format
 * the language code like below (which is legal according to OGC schemas, but is not INSPIRE compliant):
 *
 * {@snippet lang="xml" :
 *     <lan:language>
 *       <gco:CharacterString>fra</gco:CharacterString>
 *     </lan:language>
 *     }
 */
public static final String STRING_SUBSTITUTES = "org.apache.sis.xml.stringSubstitutes";
/**
 * Specifies a listener to be notified when a non-fatal error occurred during the (un)marshalling.
 * The value for this property shall be an instance of {@link Filter}.
 *
 * <p>By default, warnings that occur during the (un)marshalling process are logged. However, if a
 * property is set for this key, then the {@link Filter#isLoggable(LogRecord)} method will be invoked.
 * If that method returns {@code false}, then the warning will not be logged by the (un)marshaller.</p>
 *
 * @since 1.0
 */
public static final String WARNING_FILTER = "org.apache.sis.xml.warningFilter";
/**
 * The pool of marshallers and unmarshallers used by this class.
 * The field name uses the uppercase convention because this field is almost constant:
 * this field is initially null, then created by {@link #getPool()} when first needed.
 * Once created the field value usually doesn't change. However, the field may be reset
 * to {@code null} when modules are loaded or unloaded by a container such as OSGi,
 * because the set of classes returned by {@link TypeRegistration#load(boolean)} may have changed.
 *
 * <p>Declared {@code volatile} so the double-checked locking in {@code getPool()} is safe.</p>
 *
 * @see #getPool()
 */
private static volatile MarshallerPool POOL;
/**
 * Registers a listener for module path changes. In such case, a new pool will need to
 * be created because the {@code JAXBContext} may be different.
 */
static {
    SystemListener.add(new SystemListener(Modules.UTILITIES) {
        // Drop the cached pool; getPool() lazily rebuilds it on next use.
        @Override protected void classpathChanged() {
            POOL = null;
        }
    });
}
/**
 * Do not allow instantiation of this class: it is a static utility class
 * exposing only constants and static (un)marshalling helper methods.
 */
private XML() {
}
/**
 * Returns the default (un)marshaller pool used by all methods in this class.
 * The pool is created on first use with {@link #LENIENT_UNMARSHAL} set to {@code true}.
 *
 * <h4>Implementation note</h4>
 * Current implementation uses the double-check idiom. This is usually a deprecated practice
 * (the recommended alternative is to use static class initialization), but in this particular
 * case the field may be reset to {@code null} if modules are loaded or unloaded by a container,
 * so static class initialization would be a little bit too rigid.
 */
@SuppressWarnings("DoubleCheckedLocking")
private static MarshallerPool getPool() throws JAXBException {
    MarshallerPool pool = POOL;                     // Single volatile read in the common (fast) path.
    if (pool == null) {
        synchronized (XML.class) {
            pool = POOL; // Double-check idiom: see javadoc.
            if (pool == null) {
                POOL = pool = new MarshallerPool(Map.of(LENIENT_UNMARSHAL, Boolean.TRUE));
            }
        }
    }
    return pool;
}
/**
 * Marshals the given object into an XML string.
 *
 * @param  object  the root of the content tree to be marshalled.
 * @return the XML representation of the given object.
 * @throws JAXBException if an error occurred during the marshalling.
 */
public static String marshal(final Object object) throws JAXBException {
    ensureNonNull("object", object);
    final MarshallerPool pool = getPool();
    final Marshaller m = pool.acquireMarshaller();
    final StringWriter buffer = new StringWriter();
    m.marshal(object, buffer);
    // Recycled only on success: a marshaller that threw is discarded rather than reused.
    pool.recycle(m);
    return buffer.toString();
}
/**
 * Marshals the given object into the given output stream as XML.
 *
 * @param  object  the root of the content tree to be marshalled.
 * @param  output  the stream where to write.
 * @throws JAXBException if an error occurred during the marshalling.
 */
public static void marshal(final Object object, final OutputStream output) throws JAXBException {
    ensureNonNull("object", object);
    ensureNonNull("output", output);
    final MarshallerPool pool = getPool();
    final Marshaller m = pool.acquireMarshaller();
    m.marshal(object, output);
    // Recycled only on success: a marshaller that threw is discarded rather than reused.
    pool.recycle(m);
}
/**
 * Marshals the given object into the given file as XML.
 *
 * @param  object  the root of the content tree to be marshalled.
 * @param  output  the file to be written.
 * @throws JAXBException if an error occurred during the marshalling.
 */
public static void marshal(final Object object, final File output) throws JAXBException {
    ensureNonNull("object", object);
    ensureNonNull("output", output);
    final MarshallerPool pool = getPool();
    final Marshaller m = pool.acquireMarshaller();
    m.marshal(object, output);
    // Recycled only on success: a marshaller that threw is discarded rather than reused.
    pool.recycle(m);
}
/**
 * Marshals the given object into the file identified by the given path.
 * The file is created if it does not exist, and overwritten otherwise.
 *
 * @param  object  the root of the content tree to be marshalled.
 * @param  output  the file to be written.
 * @throws JAXBException if the file cannot be opened or an error occurred during the marshalling.
 */
public static void marshal(final Object object, final Path output) throws JAXBException {
    ensureNonNull("object", object);
    ensureNonNull("output", output);
    // Open the stream first: a marshaller is acquired only if the file is writable.
    try (OutputStream stream = Files.newOutputStream(output, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
        final MarshallerPool pool = getPool();
        final Marshaller m = pool.acquireMarshaller();
        m.marshal(object, stream);
        // Recycled only on success: a marshaller that threw is discarded rather than reused.
        pool.recycle(m);
    } catch (IOException e) {
        // Wrap the I/O failure in the checked exception type declared by this API.
        throw new JAXBException(Errors.format(Errors.Keys.CanNotOpen_1, output), e);
    }
}
/**
 * Marshal the given object to a stream, DOM or other destinations.
 * This is the most flexible marshalling method provided in this {@code XML} class.
 * The destination is specified by the {@code output} argument implementation, for example
 * {@link javax.xml.transform.stream.StreamResult} for writing to a file or output stream.
 * The optional {@code properties} map can contain any key documented in this {@code XML} class,
 * together with the keys documented in the <i>supported properties</i> section of the
 * {@link Marshaller} class.
 *
 * @param object the root of content tree to be marshalled.
 * @param output the destination where to write (stream, DOM, StAX, …).
 * @param properties an optional map of properties to give to the marshaller, or {@code null} if none.
 * @throws JAXBException if a property has an illegal value, or if an error occurred during the marshalling.
 *
 * @since 0.4
 */
public static void marshal(final Object object, final Result output, final Map<String,?> properties) throws JAXBException {
    ensureNonNull("object", object);
    ensureNonNull("output", output);
    final MarshallerPool pool = getPool();
    final Marshaller marshaller = pool.acquireMarshaller();
    // Apply caller-supplied properties before marshalling; an illegal key/value throws JAXBException here.
    if (properties != null) {
        for (final Map.Entry<String,?> entry : properties.entrySet()) {
            marshaller.setProperty(entry.getKey(), entry.getValue());
        }
    }
    /*
     * STAX results are not handled by JAXB. We have to handle those cases ourselves.
     * This workaround should be removed if a future JDK version handles those cases.
     */
    if (output instanceof StAXResult) {
        @Workaround(library = "JDK", version = "1.8")
        final XMLStreamWriter writer = ((StAXResult) output).getXMLStreamWriter();
        if (writer != null) {
            marshaller.marshal(object, writer);
        } else {
            // A StAXResult carries either a stream writer or an event writer; fall back to the latter.
            marshaller.marshal(object, ((StAXResult) output).getXMLEventWriter());
        }
    } else {
        marshaller.marshal(object, output);
    }
    // Recycled only on success: a marshaller that threw is not returned to the pool.
    pool.recycle(marshaller);
}
/**
 * Unmarshals an object from the given string.
 * Note that the given argument is the XML document itself,
 * <strong>not</strong> a URL to an XML document.
 *
 * @param  xml  the XML representation of an object.
 * @return the object unmarshalled from the given input.
 * @throws JAXBException if an error occurred during the unmarshalling.
 */
public static Object unmarshal(final String xml) throws JAXBException {
    ensureNonNull("input", xml);
    final MarshallerPool pool = getPool();
    final Unmarshaller u = pool.acquireUnmarshaller();
    final Object result = u.unmarshal(new StringReader(xml));
    // Recycled only on success: an unmarshaller that threw is discarded rather than reused.
    pool.recycle(u);
    return result;
}
/**
 * Unmarshals an object from the given stream.
 *
 * @param  input  the stream from which to read an XML representation.
 * @return the object unmarshalled from the given input.
 * @throws JAXBException if an error occurred during the unmarshalling.
 */
public static Object unmarshal(final InputStream input) throws JAXBException {
    ensureNonNull("input", input);
    final MarshallerPool pool = getPool();
    final Unmarshaller u = pool.acquireUnmarshaller();
    final Object result = u.unmarshal(input);
    // Recycled only on success: an unmarshaller that threw is discarded rather than reused.
    pool.recycle(u);
    return result;
}
/**
 * Unmarshals an object from the document at the given URL.
 *
 * @param  input  the URL from which to read an XML representation.
 * @return the object unmarshalled from the given input.
 * @throws JAXBException if an error occurred during the unmarshalling.
 */
public static Object unmarshal(final URL input) throws JAXBException {
    ensureNonNull("input", input);
    final MarshallerPool pool = getPool();
    final Unmarshaller u = pool.acquireUnmarshaller();
    final Object result = u.unmarshal(input);
    // Recycled only on success: an unmarshaller that threw is discarded rather than reused.
    pool.recycle(u);
    return result;
}
/**
 * Unmarshals an object from the given file.
 *
 * @param  input  the file from which to read an XML representation.
 * @return the object unmarshalled from the given input.
 * @throws JAXBException if an error occurred during the unmarshalling.
 */
public static Object unmarshal(final File input) throws JAXBException {
    ensureNonNull("input", input);
    final MarshallerPool pool = getPool();
    final Unmarshaller u = pool.acquireUnmarshaller();
    final Object result = u.unmarshal(input);
    // Recycled only on success: an unmarshaller that threw is discarded rather than reused.
    pool.recycle(u);
    return result;
}
/**
 * Unmarshal an object from the file at the given path.
 *
 * @param input the path from which to read a XML representation.
 * @return the object unmarshalled from the given input.
 * @throws JAXBException if the file cannot be read, or if an error occurred during the unmarshalling.
 */
public static Object unmarshal(final Path input) throws JAXBException {
    ensureNonNull("input", input);
    final Object object;
    // The URI is supplied together with the stream so relative references inside
    // the document can be resolved against the file location.
    try (InputStream in = new BufferedInputStream(Files.newInputStream(input, StandardOpenOption.READ))) {
        object = unmarshal(URISource.create(in, input.toUri()), null);
    } catch (URISyntaxException | IOException e) {
        // Wrap both failure kinds in the checked exception type declared by this API.
        throw new JAXBException(Errors.format(Errors.Keys.CanNotRead_1, input), e);
    }
    return object;
}
/**
 * Unmarshal an object from the given stream, DOM or other sources.
 * Together with the {@linkplain #unmarshal(Source, Class, Map) Unmarshal by Declared Type} variant,
 * this is the most flexible unmarshalling method provided in this {@code XML} class.
 * The source is specified by the {@code input} argument implementation, for example
 * {@link javax.xml.transform.stream.StreamSource} for reading from a file or input stream.
 * The optional {@code properties} map can contain any key documented in this {@code XML} class,
 * together with the keys documented in the <i>supported properties</i> section of the
 * {@link Unmarshaller} class.
 *
 * @param input the source from which to read an XML representation (stream, DOM, StAX, …).
 * @param properties an optional map of properties to give to the unmarshaller, or {@code null} if none.
 * @return the object unmarshalled from the given input.
 * @throws JAXBException if a property has an illegal value, or if an error occurred during the unmarshalling.
 *
 * @since 0.4
 */
public static Object unmarshal(final Source input, final Map<String,?> properties) throws JAXBException {
    ensureNonNull("input", input);
    final MarshallerPool pool = getPool();
    final Unmarshaller unmarshaller = pool.acquireUnmarshaller(properties);
    final Object object;
    /*
     * STAX sources are not handled by jakarta.xml.bind.helpers.AbstractUnmarshallerImpl implementation.
     * We have to handle those cases ourselves. This workaround should be removed if a future JDK version handles
     * those cases.
     */
    if (input instanceof StAXSource) {
        @Workaround(library = "JDK", version = "1.8")
        final XMLStreamReader reader = ((StAXSource) input).getXMLStreamReader();
        if (reader != null) {
            object = unmarshaller.unmarshal(reader);
        } else {
            // A StAXSource carries either a stream reader or an event reader; fall back to the latter.
            object = unmarshaller.unmarshal(((StAXSource) input).getXMLEventReader());
        }
    } else {
        object = unmarshaller.unmarshal(input);
    }
    // Recycled only on success: an unmarshaller that threw is not returned to the pool.
    pool.recycle(unmarshaller);
    return object;
}
/**
 * Unmarshal an object from the given stream, DOM or other sources.
 * Together with the {@linkplain #unmarshal(Source, Map) Unmarshal Global Root Element} variant,
 * this is the most flexible unmarshalling method provided in this {@code XML} class.
 * The source is specified by the {@code input} argument implementation, for example
 * {@link javax.xml.transform.stream.StreamSource} for reading from a file or input stream.
 * The optional {@code properties} map can contain any key documented in this {@code XML} class,
 * together with the keys documented in the <i>supported properties</i> section of the
 * {@link Unmarshaller} class.
 *
 * @param <T> compile-time value of the {@code declaredType} argument.
 * @param input the source from which to read an XML representation (stream, DOM, StAX, …).
 * @param declaredType the JAXB mapped class of the object to unmarshal.
 * @param properties an optional map of properties to give to the unmarshaller, or {@code null} if none.
 * @return the object unmarshalled from the given input, wrapped in a JAXB element.
 * @throws JAXBException if a property has an illegal value, or if an error occurred during the unmarshalling.
 *
 * @since 0.8
 */
public static <T> JAXBElement<T> unmarshal(final Source input, final Class<T> declaredType, final Map<String,?> properties)
        throws JAXBException
{
    ensureNonNull("input", input);
    ensureNonNull("declaredType", declaredType);
    final MarshallerPool pool = getPool();
    final Unmarshaller unmarshaller = pool.acquireUnmarshaller(properties);
    final JAXBElement<T> element;
    if (input instanceof StAXSource) {              // Same workaround as the one documented in above method.
        @Workaround(library = "JDK", version = "1.8")
        final XMLStreamReader reader = ((StAXSource) input).getXMLStreamReader();
        if (reader != null) {
            element = unmarshaller.unmarshal(reader, declaredType);
        } else {
            // A StAXSource carries either a stream reader or an event reader; fall back to the latter.
            element = unmarshaller.unmarshal(((StAXSource) input).getXMLEventReader(), declaredType);
        }
    } else {
        element = unmarshaller.unmarshal(input, declaredType);
    }
    // Recycled only on success: an unmarshaller that threw is not returned to the pool.
    pool.recycle(unmarshaller);
    return element;
}
}
|
apache/storm | 35,569 | storm-server/src/main/java/org/apache/storm/localizer/AsyncLocalizer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.localizer;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import org.apache.storm.Config;
import org.apache.storm.DaemonConfig;
import org.apache.storm.blobstore.ClientBlobStore;
import org.apache.storm.daemon.supervisor.AdvancedFSOps;
import org.apache.storm.daemon.supervisor.SupervisorUtils;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.KeyNotFoundException;
import org.apache.storm.generated.LocalAssignment;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.metric.StormMetricsRegistry;
import org.apache.storm.shade.com.google.common.annotations.VisibleForTesting;
import org.apache.storm.shade.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.storm.thrift.transport.TTransportException;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.NimbusLeaderNotFoundException;
import org.apache.storm.utils.ObjectReader;
import org.apache.storm.utils.ServerConfigUtils;
import org.apache.storm.utils.ServerUtils;
import org.apache.storm.utils.Utils;
import org.apache.storm.utils.WrappedKeyNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Downloads and caches blobs locally.
*/
public class AsyncLocalizer implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(AsyncLocalizer.class);
// Shared already-completed future, reusable wherever "nothing to wait for" is the answer.
private static final CompletableFuture<Void> ALL_DONE_FUTURE = CompletableFuture.completedFuture(null);
// Delay (ms) between blob download retry attempts.
private static final int ATTEMPTS_INTERVAL_TIME = 100;
// Metrics: timers for cache-update and localization latency, meters for error counts.
private final Timer blobCacheUpdateDuration;
private final Timer blobLocalizationDuration;
private final Meter localResourceFileNotFoundWhenReleasingSlot;
private final Meter updateBlobExceptions;
// track resources - user to resourceSet
//ConcurrentHashMap is explicitly used everywhere in this class because it uses locks to guarantee atomicity for compute and
// computeIfAbsent where as ConcurrentMap allows for a retry of the function passed in, and would require the function to have
// no side effects.
protected final ConcurrentHashMap<String, ConcurrentHashMap<String, LocalizedResource>> userFiles = new ConcurrentHashMap<>();
protected final ConcurrentHashMap<String, ConcurrentHashMap<String, LocalizedResource>> userArchives = new ConcurrentHashMap<>();
private final boolean isLocalMode;
// topology to tracking of topology dir and resources
private final ConcurrentHashMap<String, CompletableFuture<Void>> blobPending;
private final Map<String, Object> conf;
private final AdvancedFSOps fsOps;
private final boolean symlinksDisabled;
// Topology-level blobs (jar/code/conf) keyed by blob-store key.
private final ConcurrentHashMap<String, LocallyCachedBlob> topologyBlobs = new ConcurrentHashMap<>();
// Per-topology future completed once the base (jar/code/conf) download finished.
private final ConcurrentHashMap<String, CompletableFuture<Void>> topologyBasicDownloaded = new ConcurrentHashMap<>();
private final Path localBaseDir;
private final int blobDownloadRetries;
// Separate pools: one for blocking blob downloads, one for periodic bookkeeping tasks.
private final ScheduledExecutorService downloadExecService;
private final ScheduledExecutorService taskExecService;
private final long cacheCleanupPeriod;
private final int updateBlobPeriod;
private final StormMetricsRegistry metricsRegistry;
// cleanup
@VisibleForTesting
protected long cacheTargetSize;
@VisibleForTesting
// Primary constructor: wires metrics, reads sizing/retry/interval settings from the
// daemon configuration, creates the two executor pools, and recovers any resources
// already on disk via reconstructLocalizedResources().
AsyncLocalizer(Map<String, Object> conf, AdvancedFSOps ops, String baseDir, StormMetricsRegistry metricsRegistry) throws IOException {
    this.conf = conf;
    this.blobCacheUpdateDuration = metricsRegistry.registerTimer("supervisor:blob-cache-update-duration");
    this.blobLocalizationDuration = metricsRegistry.registerTimer("supervisor:blob-localization-duration");
    this.localResourceFileNotFoundWhenReleasingSlot
        = metricsRegistry.registerMeter("supervisor:local-resource-file-not-found-when-releasing-slot");
    this.updateBlobExceptions = metricsRegistry.registerMeter("supervisor:update-blob-exceptions");
    this.metricsRegistry = metricsRegistry;
    isLocalMode = ConfigUtils.isLocalMode(conf);
    fsOps = ops;
    localBaseDir = Paths.get(baseDir);
    // default cache size 10GB, converted to Bytes
    cacheTargetSize = ObjectReader.getInt(conf.get(DaemonConfig.SUPERVISOR_LOCALIZER_CACHE_TARGET_SIZE_MB),
        10 * 1024).longValue() << 20;
    // default 30 seconds. (we cache the size so it is cheap to do)
    cacheCleanupPeriod = ObjectReader.getInt(conf.get(
        DaemonConfig.SUPERVISOR_LOCALIZER_CACHE_CLEANUP_INTERVAL_MS), 30 * 1000).longValue();
    updateBlobPeriod = ServerConfigUtils.getLocalizerUpdateBlobInterval(conf);
    blobDownloadRetries = ObjectReader.getInt(conf.get(
        DaemonConfig.SUPERVISOR_BLOBSTORE_DOWNLOAD_MAX_RETRIES), 3);
    // Download pool size is configurable (default 5); the task pool is fixed at 3 threads.
    int downloadThreadPoolSize = ObjectReader.getInt(conf.get(DaemonConfig.SUPERVISOR_BLOBSTORE_DOWNLOAD_THREAD_COUNT), 5);
    downloadExecService = Executors.newScheduledThreadPool(downloadThreadPoolSize,
        new ThreadFactoryBuilder().setNameFormat("AsyncLocalizer Download Executor - %d").build());
    taskExecService = Executors.newScheduledThreadPool(3,
        new ThreadFactoryBuilder().setNameFormat("AsyncLocalizer Task Executor - %d").build());
    // Rebuild in-memory tracking of blobs that already exist on local disk.
    reconstructLocalizedResources();
    symlinksDisabled = (boolean) conf.getOrDefault(Config.DISABLE_SYMLINKS, false);
    blobPending = new ConcurrentHashMap<>();
}
// Production constructor: derives the filesystem helper and the supervisor-local
// base directory from the configuration, then delegates to the testing constructor.
public AsyncLocalizer(Map<String, Object> conf, StormMetricsRegistry metricsRegistry) throws IOException {
    this(conf, AdvancedFSOps.make(conf), ConfigUtils.supervisorLocalDir(conf), metricsRegistry);
}
/**
 * Returns (creating and caching if needed) the locally cached blob tracking the
 * topology jar for the given topology. Creation does not download anything.
 */
@VisibleForTesting
LocallyCachedBlob getTopoJar(final String topologyId, String owner) {
    final String jarKey = ConfigUtils.masterStormJarKey(topologyId);
    return topologyBlobs.computeIfAbsent(jarKey, key -> {
        try {
            return new LocallyCachedTopologyBlob(topologyId, isLocalMode, conf, fsOps,
                LocallyCachedTopologyBlob.TopologyBlobType.TOPO_JAR, owner, metricsRegistry);
        } catch (IOException e) {
            // computeIfAbsent cannot throw checked exceptions, so wrap and rethrow.
            String message = "Failed getTopoJar for " + topologyId;
            LOG.error(message, e);
            throw new RuntimeException(message, e);
        }
    });
}
/**
 * Returns (creating and caching if needed) the locally cached blob tracking the
 * serialized topology code for the given topology. Creation does not download anything.
 */
@VisibleForTesting
LocallyCachedBlob getTopoCode(final String topologyId, String owner) {
    final String codeKey = ConfigUtils.masterStormCodeKey(topologyId);
    return topologyBlobs.computeIfAbsent(codeKey, key -> {
        try {
            return new LocallyCachedTopologyBlob(topologyId, isLocalMode, conf, fsOps,
                LocallyCachedTopologyBlob.TopologyBlobType.TOPO_CODE, owner, metricsRegistry);
        } catch (IOException e) {
            // computeIfAbsent cannot throw checked exceptions, so wrap and rethrow.
            String message = "Failed getTopoCode for " + topologyId;
            LOG.error(message, e);
            throw new RuntimeException(message, e);
        }
    });
}
/**
 * Returns (creating and caching if needed) the locally cached blob tracking the
 * topology configuration for the given topology. Creation does not download anything.
 */
@VisibleForTesting
LocallyCachedBlob getTopoConf(final String topologyId, String owner) {
    final String confKey = ConfigUtils.masterStormConfKey(topologyId);
    return topologyBlobs.computeIfAbsent(confKey, key -> {
        try {
            return new LocallyCachedTopologyBlob(topologyId, isLocalMode, conf, fsOps,
                LocallyCachedTopologyBlob.TopologyBlobType.TOPO_CONF, owner, metricsRegistry);
        } catch (IOException e) {
            // computeIfAbsent cannot throw checked exceptions, so wrap and rethrow.
            String message = "Failed getTopoConf for " + topologyId;
            LOG.error(message, e);
            throw new RuntimeException(message, e);
        }
    });
}
/**
 * Returns the tracking entry for a user archive blob, creating it (without downloading)
 * on first access. A non-null user is required for all user archives.
 */
private LocalizedResource getUserArchive(String user, String key) {
    if (user == null) {
        throw new AssertionError("All user archives require a user present");
    }
    return userArchives
        .computeIfAbsent(user, ignored -> new ConcurrentHashMap<>())
        .computeIfAbsent(key, k -> new LocalizedResource(key, localBaseDir, true, fsOps, conf, user, metricsRegistry));
}
/**
 * Returns the tracking entry for a user file blob, creating it (without downloading)
 * on first access. A non-null user is required for all user files.
 *
 * @param user the owner of the file; must not be null.
 * @param key  the blob store key of the file.
 * @return the {@link LocalizedResource} tracking this user/key pair.
 */
private LocalizedResource getUserFile(String user, String key) {
    if (user == null) {
        // Fixed copy-paste error: message previously said "user archives" in the user-files path.
        throw new AssertionError("All user files require a user present");
    }
    ConcurrentMap<String, LocalizedResource> keyToResource = userFiles.computeIfAbsent(user, (u) -> new ConcurrentHashMap<>());
    return keyToResource.computeIfAbsent(key,
        (k) -> new LocalizedResource(key, localBaseDir, false, fsOps, conf, user, metricsRegistry));
}
/**
 * Request that all of the blobs necessary for this topology be downloaded. Note that this adds references to
 * blobs asynchronously in background threads.
 *
 * @param assignment the assignment that needs the blobs
 * @param port the port the assignment is a part of
 * @param cb a callback for when the blobs change. This is only for blobs that are tied to the lifetime of the worker.
 * @return a Future that indicates when they are all downloaded.
 *
 * @throws IOException if there was an error while trying doing it.
 */
public CompletableFuture<Void> requestDownloadTopologyBlobs(final LocalAssignment assignment, final int port,
                                                            final BlobChangingCallback cb) throws IOException {
    // Wrap the port/assignment pair so the localization duration is recorded on completion.
    final PortAndAssignment pna = new TimePortAndAssignment(new PortAndAssignmentImpl(port, assignment), blobLocalizationDuration);
    final String topologyId = pna.getToplogyId();
    LOG.info("requestDownloadTopologyBlobs for {}", pna);
    // First make sure the base blobs (jar/code/conf) are present, then the topology-specific blobs.
    CompletableFuture<Void> baseBlobs = requestDownloadBaseTopologyBlobs(pna, cb);
    return baseBlobs.thenComposeAsync((v) ->
        // compute() holds the map lock, making the download-or-add-reference decision atomic per topology.
        blobPending.compute(topologyId, (tid, old) -> {
            CompletableFuture<Void> ret = old;
            if (ret == null) {
                // First request for this topology: kick off the download of its blobstore-map blobs.
                ret = CompletableFuture.supplyAsync(new DownloadBlobs(pna, cb), taskExecService);
            } else {
                // Download already in flight or done: just add this worker's references.
                try {
                    addReferencesToBlobs(pna, cb);
                } catch (Exception e) {
                    LOG.error("Failed adding references to blobs for " + pna, e);
                    throw new RuntimeException(e);
                } finally {
                    // Stop the localization timer for this request.
                    pna.complete();
                }
            }
            LOG.debug("Reserved blobs {} {}", topologyId, ret);
            return ret;
        }));
}
// Registers a reference from this worker on each of the three base topology blobs
// (jar, code, conf), then returns the per-topology future that completes when all
// three have been downloaded. The download itself is started at most once per
// topology via computeIfAbsent.
@VisibleForTesting
CompletableFuture<Void> requestDownloadBaseTopologyBlobs(PortAndAssignment pna, BlobChangingCallback cb) {
    final String topologyId = pna.getToplogyId();
    // addReference is called unconditionally so repeated requests keep the blobs pinned.
    final LocallyCachedBlob topoJar = getTopoJar(topologyId, pna.getAssignment().get_owner());
    topoJar.addReference(pna, cb);
    final LocallyCachedBlob topoCode = getTopoCode(topologyId, pna.getAssignment().get_owner());
    topoCode.addReference(pna, cb);
    final LocallyCachedBlob topoConf = getTopoConf(topologyId, pna.getAssignment().get_owner());
    topoConf.addReference(pna, cb);
    return topologyBasicDownloaded.computeIfAbsent(topologyId,
        (tid) -> downloadOrUpdate(topoJar, topoCode, topoConf));
}
// Varargs convenience overload; delegates to the Collection-based implementation.
private CompletableFuture<Void> downloadOrUpdate(LocallyCachedBlob... blobs) {
    return downloadOrUpdate(Arrays.asList(blobs));
}
/**
 * Schedules a download-or-update for every given blob on the download executor and
 * returns a future that completes when all of them have finished. Each blob is
 * retried up to {@code blobDownloadRetries} times, sleeping
 * {@code ATTEMPTS_INTERVAL_TIME} ms between attempts, before failing the future.
 */
private CompletableFuture<Void> downloadOrUpdate(Collection<? extends LocallyCachedBlob> blobs) {
    // Snapshot the remote blobstore's update time once for this whole batch.
    final long remoteBlobstoreUpdateTime = getRemoteBlobstoreUpdateTime();
    final List<CompletableFuture<Void>> pending = new ArrayList<>(blobs.size());
    for (final LocallyCachedBlob blob : blobs) {
        pending.add(CompletableFuture.runAsync(() -> {
            LOG.debug("STARTING download of {}", blob);
            try (ClientBlobStore blobStore = getClientBlobStore()) {
                long failures = 0;
                while (true) {
                    try {
                        blob.update(blobStore, remoteBlobstoreUpdateTime);
                        break;
                    } catch (Exception e) {
                        failures++;
                        if (failures > blobDownloadRetries) {
                            throw new RuntimeException("Could not download...", e);
                        }
                        LOG.warn("Failed to download blob {} will try again in {} ms", blob, ATTEMPTS_INTERVAL_TIME, e);
                        Utils.sleep(ATTEMPTS_INTERVAL_TIME);
                    }
                }
            }
            LOG.debug("FINISHED download of {}", blob);
        }, downloadExecService));
    }
    return CompletableFuture.allOf(pending.toArray(new CompletableFuture[0]));
}
// Queries the remote blobstore for its last-update timestamp; returns -1 if the
// query fails. The inner try only guards the query itself, so a failure from
// closing the blob store client is not converted into -1.
private long getRemoteBlobstoreUpdateTime() {
    try (ClientBlobStore blobStore = getClientBlobStore()) {
        try {
            return blobStore.getRemoteBlobstoreUpdateTime();
        } catch (IOException e) {
            LOG.error("Failed to get remote blobstore update time", e);
            return -1L;
        }
    }
}
/**
 * Downloads all blobs listed in the topology configuration for all topologies assigned to this supervisor, and creates version files
 * with a suffix. The runnable is intended to be run periodically by a timer, created elsewhere.
 */
@VisibleForTesting
void updateBlobs() {
    // Time the whole update pass for the blob-cache-update-duration metric.
    try (Timer.Context t = blobCacheUpdateDuration.time()) {
        List<CompletableFuture<?>> futures = new ArrayList<>();
        // Topology blobs (jar/code/conf) are always updated.
        futures.add(downloadOrUpdate(topologyBlobs.values()));
        if (symlinksDisabled) {
            // User files/archives are materialized via symlinks, so skip them entirely.
            LOG.warn("symlinks are disabled so blobs cannot be downloaded.");
        } else {
            for (ConcurrentMap<String, LocalizedResource> map : userArchives.values()) {
                futures.add(downloadOrUpdate(map.values()));
            }
            for (ConcurrentMap<String, LocalizedResource> map : userFiles.values()) {
                futures.add(downloadOrUpdate(map.values()));
            }
        }
        // Block until every scheduled update completed (or one failed).
        for (CompletableFuture<?> f : futures) {
            f.get();
        }
    } catch (Exception e) {
        // Best-effort: record the failure and rely on the next periodic run.
        updateBlobExceptions.mark();
        LOG.warn("Could not update blob ({}), will retry again later.", e.getClass().getName());
    }
}
/**
 * Start any background threads needed. This includes updating blobs and cleaning up unused blobs over the configured size limit.
 */
public void start() {
    // updateBlobPeriod is in seconds; use fixed *delay* so a slow pass doesn't stack runs.
    LOG.debug("Scheduling updateBlobs every {} seconds", updateBlobPeriod);
    taskExecService.scheduleWithFixedDelay(this::updateBlobs, updateBlobPeriod, updateBlobPeriod, TimeUnit.SECONDS);
    // cacheCleanupPeriod is in milliseconds, scheduled at a fixed rate.
    LOG.debug("Scheduling cleanup every {} millis", cacheCleanupPeriod);
    taskExecService.scheduleAtFixedRate(this::cleanup, cacheCleanupPeriod, cacheCleanupPeriod, TimeUnit.MILLISECONDS);
}
// Shuts down both executor pools. NOTE(review): this only initiates shutdown and
// does not await termination, so in-flight downloads may still be running when
// this returns; the declared InterruptedException is currently never thrown —
// confirm whether awaitTermination was intended here.
@Override
public void close() throws InterruptedException {
    downloadExecService.shutdown();
    taskExecService.shutdown();
}
// Builds the full list of LocalResources a topology needs: entries from the
// topology.blobstore.map configuration plus every dependency jar/artifact
// recorded in the serialized topology (dependencies are uncompressed and
// tied to the worker lifetime).
private List<LocalResource> getLocalResources(PortAndAssignment pna) throws IOException {
    String topologyId = pna.getToplogyId();
    Map<String, Object> topoConf = ConfigUtils.readSupervisorStormConf(conf, topologyId);
    @SuppressWarnings("unchecked")
    Map<String, Map<String, Object>> blobstoreMap = (Map<String, Map<String, Object>>) topoConf.get(Config.TOPOLOGY_BLOBSTORE_MAP);
    List<LocalResource> ret = new ArrayList<>();
    if (blobstoreMap != null) {
        List<LocalResource> tmp = SupervisorUtils.blobstoreMapToLocalresources(blobstoreMap);
        if (tmp != null) {
            ret.addAll(tmp);
        }
    }
    StormTopology stormCode = ConfigUtils.readSupervisorTopology(conf, topologyId, fsOps);
    List<String> dependencies = new ArrayList<>();
    if (stormCode.is_set_dependency_jars()) {
        dependencies.addAll(stormCode.get_dependency_jars());
    }
    if (stormCode.is_set_dependency_artifacts()) {
        dependencies.addAll(stormCode.get_dependency_artifacts());
    }
    for (String dependency : dependencies) {
        // (key, uncompress=false, workerRestart=true) — dependency blobs restart the worker on change.
        ret.add(new LocalResource(dependency, false, true));
    }
    return ret;
}
/** Registers {@code pna} as a referent of every user blob the assignment needs. */
@VisibleForTesting
void addReferencesToBlobs(PortAndAssignment pna, BlobChangingCallback cb)
    throws IOException, KeyNotFoundException, AuthorizationException {
    List<LocalResource> localResourceList = getLocalResources(pna);
    if (localResourceList.isEmpty()) {
        return;
    }
    getBlobs(localResourceList, pna, cb);
}
/**
 * Do everything needed to recover the state in the AsyncLocalizer for a running topology.
 *
 * @param currentAssignment the assignment for the topology.
 * @param port the port the assignment is on.
 * @param cb a callback for when the blobs are updated. This will only be for blobs that indicate that if they change
 *     the worker should be restarted.
 * @throws IOException on any error trying to recover the state.
 */
public void recoverRunningTopology(final LocalAssignment currentAssignment, final int port,
                                   final BlobChangingCallback cb) throws IOException {
    final PortAndAssignment pna = new PortAndAssignmentImpl(port, currentAssignment);
    final String topologyId = pna.getToplogyId();
    LOG.info("recoverRunningTopology for {}", pna);
    // Re-reference the three base topology blobs (jar, code, conf) for this slot.
    LocallyCachedBlob topoJar = getTopoJar(topologyId, pna.getAssignment().get_owner());
    topoJar.addReference(pna, cb);
    LocallyCachedBlob topoCode = getTopoCode(topologyId, pna.getAssignment().get_owner());
    topoCode.addReference(pna, cb);
    LocallyCachedBlob topoConf = getTopoConf(topologyId, pna.getAssignment().get_owner());
    topoConf.addReference(pna, cb);
    // Mark the topology's user-blob download as already complete during recovery.
    CompletableFuture<Void> localResource = blobPending.computeIfAbsent(topologyId, (tid) -> ALL_DONE_FUTURE);
    try {
        addReferencesToBlobs(pna, cb);
    } catch (KeyNotFoundException | AuthorizationException e) {
        // Fix: include the exception so the cause of the failed recovery is not lost.
        LOG.error("Could not recover all blob references for {}", pna, e);
    }
    LOG.debug("Recovered blobs {} {}", topologyId, localResource);
}
/**
 * Remove this assignment/port as blocking resources from being cleaned up.
 *
 * @param assignment the assignment the resources are for
 * @param port the port the topology is running on
 * @throws IOException on any error
 */
public void releaseSlotFor(LocalAssignment assignment, int port) throws IOException {
    PortAndAssignment pna = new PortAndAssignmentImpl(port, assignment);
    final String topologyId = assignment.get_topology_id();
    LOG.info("Releasing slot for {} {}", topologyId, port);
    // Drop the slot's reference on each of the three base topology blobs, if cached.
    String[] baseBlobKeys = {
        ConfigUtils.masterStormJarKey(topologyId),
        ConfigUtils.masterStormCodeKey(topologyId),
        ConfigUtils.masterStormConfKey(topologyId)
    };
    for (String baseBlobKey : baseBlobKeys) {
        LocallyCachedBlob blob = topologyBlobs.get(baseBlobKey);
        if (blob != null) {
            blob.removeReference(pna);
        }
    }
    List<LocalResource> localResources;
    try {
        // Precondition1: Base blob stormconf.ser and stormcode.ser are available
        // Precondition2: Both files have proper permission
        localResources = getLocalResources(pna);
    } catch (FileNotFoundException e) {
        LOG.info("Port and assignment info: {}", pna);
        localResourceFileNotFoundWhenReleasingSlot.mark();
        LOG.warn("Local base blobs are not available. ", e);
        return;
    } catch (IOException e) {
        LOG.info("Port and assignment info: {}", pna);
        LOG.error("Unable to read local file. ", e);
        throw e;
    }
    for (LocalResource lr : localResources) {
        removeBlobReference(lr.getBlobName(), pna, lr.shouldUncompress());
    }
}
// baseDir/supervisor/usercache/user1/
/** Root of the localized-resource cache directory for {@code userName}. */
@VisibleForTesting
File getLocalUserDir(String userName) {
    return LocalizedResource.getLocalUserDir(localBaseDir, userName).toFile();
}

// baseDir/supervisor/usercache/user1/filecache
/** File-cache subdirectory inside the per-user cache directory for {@code userName}. */
@VisibleForTesting
File getLocalUserFileCacheDir(String userName) {
    return LocalizedResource.getLocalUserFileCacheDir(localBaseDir, userName).toFile();
}
/**
 * Recreates in-memory tracking for each archive key already localized on disk for {@code user}.
 * getUserArchive is called for its side effect only; the returned resource is ignored.
 */
private void recoverLocalizedArchivesForUser(String user) throws IOException {
    for (String key : LocalizedResource.getLocalizedArchiveKeys(localBaseDir, user)) {
        getUserArchive(user, key);
    }
}

/**
 * Recreates in-memory tracking for each file key already localized on disk for {@code user}.
 * getUserFile is called for its side effect only; the returned resource is ignored.
 */
private void recoverLocalizedFilesForUser(String user) throws IOException {
    for (String key : LocalizedResource.getLocalizedFileKeys(localBaseDir, user)) {
        getUserFile(user, key);
    }
}
// Scans the local cache directories and rebuilds the in-memory resource maps for every
// user that still has files left over from a previous run.
private void reconstructLocalizedResources() {
    try {
        LOG.info("Reconstruct localized resources");
        Collection<String> users = LocalizedResource.getLocalizedUsers(localBaseDir);
        if (users == null || users.isEmpty()) {
            LOG.debug("No left over resources found for any user");
        } else {
            for (String user : users) {
                LOG.debug("reconstructing resources owned by {}", user);
                recoverLocalizedFilesForUser(user);
                recoverLocalizedArchivesForUser(user);
            }
        }
    } catch (Exception e) {
        // Recovery is best-effort; a failure here must not prevent supervisor startup.
        LOG.error("ERROR reconstructing localized resources", e);
    }
}
/**
 * Drops {@code pna}'s reference to the localized user blob {@code key}. An unknown
 * user/topology/key combination is logged and otherwise ignored.
 *
 * @param key the blob key
 * @param pna the slot whose reference is being removed
 * @param uncompress true to look in the archive cache, false for the file cache
 */
void removeBlobReference(String key, PortAndAssignment pna, boolean uncompress) {
    String user = pna.getOwner();
    String topo = pna.getToplogyId();
    ConcurrentMap<String, LocalizedResource> lrsrcSet = uncompress ? userArchives.get(user) : userFiles.get(user);
    // Fix: use parameterized SLF4J logging instead of string concatenation.
    if (lrsrcSet == null) {
        LOG.warn("trying to remove blob for non-existent resource set for user: {} key: {} topo: {}", user, key, topo);
        return;
    }
    LocalizedResource lrsrc = lrsrcSet.get(key);
    if (lrsrc == null) {
        LOG.warn("trying to remove non-existent blob, key: {} for user: {} topo: {}", key, user, topo);
        return;
    }
    LOG.debug("removing blob reference to: {} for topo: {}", key, topo);
    lrsrc.removeReference(pna);
}
/** Factory hook for the blob-store client; protected presumably so tests can override it. */
protected ClientBlobStore getClientBlobStore() {
    return ServerUtils.getClientBlobStoreForSupervisor(conf);
}
/**
 * This function either returns the blobs in the existing cache or if they don't exist in the cache, it downloads them in parallel (up
 * to SUPERVISOR_BLOBSTORE_DOWNLOAD_THREAD_COUNT) and will block until all of them have been downloaded.
 *
 * @param localResources the blobs to look up or fetch
 * @param pna the slot that will hold a reference on each blob
 * @param cb notified later for blobs whose change should restart the worker
 * @return the localized resources, in the same order as {@code localResources}
 * @throws KeyNotFoundException if a blob key does not exist, or if symlinks are disabled
 * @throws IOException on download/interruption/rejection failures
 */
List<LocalizedResource> getBlobs(List<LocalResource> localResources, PortAndAssignment pna, BlobChangingCallback cb)
    throws AuthorizationException, KeyNotFoundException, IOException {
    if ((boolean) conf.getOrDefault(Config.DISABLE_SYMLINKS, false)) {
        throw new WrappedKeyNotFoundException("symlinks are disabled so blobs cannot be downloaded.");
    }
    String user = pna.getOwner();
    ArrayList<LocalizedResource> results = new ArrayList<>();
    List<CompletableFuture<?>> futures = new ArrayList<>();
    try {
        for (LocalResource localResource : localResources) {
            String key = localResource.getBlobName();
            boolean uncompress = localResource.shouldUncompress();
            LocalizedResource lrsrc = uncompress ? getUserArchive(user, key) : getUserFile(user, key);
            // go off to blobstore and get it
            // assume dir passed in exists and has correct permission
            LOG.debug("fetching blob: {}", key);
            // Reference first, then download: prevents concurrent cleanup from evicting
            // the blob while the download is in flight.
            lrsrc.addReference(pna, localResource.needsCallback() ? cb : null);
            futures.add(downloadOrUpdate(lrsrc));
            results.add(lrsrc);
        }
        // Block until every parallel download has finished.
        for (CompletableFuture<?> futureRsrc : futures) {
            futureRsrc.get();
        }
    } catch (ExecutionException e) {
        // Surface the original blobstore failure type when possible; otherwise wrap in IOException.
        Utils.unwrapAndThrow(AuthorizationException.class, e);
        Utils.unwrapAndThrow(KeyNotFoundException.class, e);
        throw new IOException("Error getting blobs", e);
    } catch (RejectedExecutionException re) {
        throw new IOException("RejectedExecutionException: ", re);
    } catch (InterruptedException ie) {
        throw new IOException("Interrupted Exception", ie);
    }
    return results;
}
/** Invokes {@code consumer} once per topology directory under the supervisor's storm-dist root. */
private void forEachTopologyDistDir(ConsumePathAndId consumer) throws IOException {
    Path distRoot = Paths.get(ConfigUtils.supervisorStormDistRoot(conf));
    // Files.isDirectory is false for a missing path, so no separate existence check is needed.
    if (!Files.isDirectory(distRoot)) {
        return;
    }
    try (DirectoryStream<Path> entries = Files.newDirectoryStream(distRoot)) {
        for (Path entry : entries) {
            if (Files.isDirectory(entry)) {
                // The directory name is the topology id.
                consumer.accept(entry, entry.getFileName().toString());
            }
        }
    }
}
/**
 * Periodic cache cleanup: evicts least-recently-used unreferenced blobs until the cache is
 * under the configured target size, deletes orphaned topology dist directories, and removes
 * per-user cache directories that no longer hold any resources.
 */
@VisibleForTesting
void cleanup() {
    try {
        LOG.info("Starting cleanup");
        LocalizedResourceRetentionSet toClean = new LocalizedResourceRetentionSet(cacheTargetSize);
        // need one large set of all and then clean via LRU
        for (Map.Entry<String, ConcurrentHashMap<String, LocalizedResource>> t : userArchives.entrySet()) {
            toClean.addResources(t.getValue());
            LOG.debug("Resources to be cleaned after adding {} archives : {}", t.getKey(), toClean);
        }
        for (Map.Entry<String, ConcurrentHashMap<String, LocalizedResource>> t : userFiles.entrySet()) {
            toClean.addResources(t.getValue());
            LOG.debug("Resources to be cleaned after adding {} files : {}", t.getKey(), toClean);
        }
        toClean.addResources(topologyBlobs);
        // Track which topologies lost a base blob in this pass; they can no longer be
        // treated as fully downloaded.
        Set<String> topologiesWithDeletes = new HashSet<>();
        try (ClientBlobStore store = getClientBlobStore()) {
            Set<LocallyCachedBlob> deletedBlobs = toClean.cleanup(store);
            for (LocallyCachedBlob deletedBlob : deletedBlobs) {
                String topologyId = ConfigUtils.getIdFromBlobKey(deletedBlob.getKey());
                if (topologyId != null) {
                    topologiesWithDeletes.add(topologyId);
                }
            }
        }
        HashSet<String> safeTopologyIds = new HashSet<>();
        for (String blobKey : topologyBlobs.keySet()) {
            safeTopologyIds.add(ConfigUtils.getIdFromBlobKey(blobKey));
        }
        LOG.debug("Topologies {} can no longer be considered fully downloaded", topologiesWithDeletes);
        safeTopologyIds.removeAll(topologiesWithDeletes);
        //Deleting this early does not hurt anything
        topologyBasicDownloaded.keySet().removeIf(topoId -> !safeTopologyIds.contains(topoId));
        blobPending.keySet().removeIf(topoId -> !safeTopologyIds.contains(topoId));
        try {
            // Remove dist directories for topologies with no remaining cached base blobs.
            forEachTopologyDistDir((p, topologyId) -> {
                String topoJarKey = ConfigUtils.masterStormJarKey(topologyId);
                String topoCodeKey = ConfigUtils.masterStormCodeKey(topologyId);
                String topoConfKey = ConfigUtils.masterStormConfKey(topologyId);
                if (!topologyBlobs.containsKey(topoJarKey)
                    && !topologyBlobs.containsKey(topoCodeKey)
                    && !topologyBlobs.containsKey(topoConfKey)) {
                    fsOps.deleteIfExists(p.toFile());
                }
            });
        } catch (Exception e) {
            LOG.error("Could not read topology directories for cleanup", e);
        }
        LOG.debug("Resource cleanup: {}", toClean);
        // Finally, garbage-collect per-user cache directories with no files or archives left.
        Set<String> allUsers = new HashSet<>(userArchives.keySet());
        allUsers.addAll(userFiles.keySet());
        for (String user : allUsers) {
            ConcurrentMap<String, LocalizedResource> filesForUser = userFiles.get(user);
            ConcurrentMap<String, LocalizedResource> archivesForUser = userArchives.get(user);
            if ((filesForUser == null || filesForUser.size() == 0)
                && (archivesForUser == null || archivesForUser.size() == 0)) {
                LOG.debug("removing empty set: {}", user);
                try {
                    LocalizedResource.completelyRemoveUnusedUser(localBaseDir, user);
                    userFiles.remove(user);
                    userArchives.remove(user);
                } catch (IOException e) {
                    LOG.error("Error trying to delete cached user files", e);
                }
            }
        }
    } catch (Exception ex) {
        LOG.error("AsyncLocalizer cleanup failure", ex);
    } catch (Error error) {
        // An Error here likely means the JVM is in a bad state; exit so the supervisor restarts.
        LOG.error("AsyncLocalizer cleanup failure", error);
        Utils.exitProcess(20, "AsyncLocalizer cleanup failure");
    } finally {
        LOG.info("Finish cleanup");
    }
}
/** Callback invoked with each topology dist directory and the topology id it belongs to. */
private interface ConsumePathAndId {
    void accept(Path path, String topologyId) throws IOException;
}
/**
 * Async task that localizes all user blobs for a slot and creates the symlinks a worker
 * expects in its topology dist directory. Completes {@code pna} on success; any failure is
 * rethrown as a RuntimeException so the owning future completes exceptionally.
 */
private class DownloadBlobs implements Supplier<Void> {
    private final PortAndAssignment pna;
    private final BlobChangingCallback cb;

    DownloadBlobs(PortAndAssignment pna, BlobChangingCallback cb) {
        this.pna = pna;
        this.cb = cb;
    }

    @Override
    public Void get() {
        try {
            String topologyId = pna.getToplogyId();
            String owner = pna.getOwner();
            String stormRoot = ConfigUtils.supervisorStormDistRoot(conf, topologyId);
            Map<String, Object> topoConf = ConfigUtils.readSupervisorStormConf(conf, topologyId);
            @SuppressWarnings("unchecked")
            Map<String, Map<String, Object>> blobstoreMap =
                (Map<String, Map<String, Object>>) topoConf.get(Config.TOPOLOGY_BLOBSTORE_MAP);
            List<LocalResource> localResourceList = getLocalResources(pna);
            if (!localResourceList.isEmpty()) {
                File userDir = getLocalUserFileCacheDir(owner);
                if (!fsOps.fileExists(userDir)) {
                    fsOps.forceMkdir(userDir);
                }
                List<LocalizedResource> localizedResources = getBlobs(localResourceList, pna, cb);
                fsOps.setupBlobPermissions(userDir, owner);
                if (!symlinksDisabled) {
                    for (LocalizedResource localizedResource : localizedResources) {
                        String keyName = localizedResource.getKey();
                        // The file the symlink will point at.
                        File rsrcFilePath = localizedResource.getCurrentSymlinkPath().toFile();
                        // Default to the blob key; a "localname" entry in the blobstore map
                        // overrides it. Dependency blobs have no map entry and use the key.
                        String symlinkName = keyName;
                        if (blobstoreMap != null) {
                            Map<String, Object> blobInfo = blobstoreMap.get(keyName);
                            if (blobInfo != null && blobInfo.containsKey("localname")) {
                                symlinkName = (String) blobInfo.get("localname");
                            }
                        }
                        fsOps.createSymlink(new File(stormRoot, symlinkName), rsrcFilePath);
                    }
                }
            }
            pna.complete();
            return null;
        } catch (Exception e) {
            LOG.warn("Caught Exception While Downloading (rethrowing)... ", e);
            throw new RuntimeException(e);
        }
    }
}
}
|
google/guava | 35,577 | guava/src/com/google/common/collect/FluentIterable.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.InlineMe;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.SortedSet;
import java.util.stream.Stream;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* A discouraged (but not deprecated) precursor to Java's superior {@link Stream} library.
*
* <p>The following types of methods are provided:
*
* <ul>
* <li>chaining methods which return a new {@code FluentIterable} based in some way on the
* contents of the current one (for example {@link #transform})
* <li>element extraction methods which facilitate the retrieval of certain elements (for example
* {@link #last})
* <li>query methods which answer questions about the {@code FluentIterable}'s contents (for
* example {@link #anyMatch})
* <li>conversion methods which copy the {@code FluentIterable}'s contents into a new collection
* or array (for example {@link #toList})
* </ul>
*
* <p>Several lesser-used features are currently available only as static methods on the {@link
* Iterables} class.
*
* <p><a id="streams"></a>
*
* <h3>Comparison to streams</h3>
*
* <p>{@link Stream} is similar to this class, but generally more powerful, and certainly more
* standard. Key differences include:
*
* <ul>
* <li>A stream is <i>single-use</i>; it becomes invalid as soon as any "terminal operation" such
* as {@code findFirst()} or {@code iterator()} is invoked. (Even though {@code Stream}
* contains all the right method <i>signatures</i> to implement {@link Iterable}, it does not
* actually do so, to avoid implying repeat-iterability.) {@code FluentIterable}, on the other
* hand, is multiple-use, and does implement {@link Iterable}.
* <li>Streams offer many features not found here, including {@code min/max}, {@code distinct},
* {@code reduce}, {@code sorted}, the very powerful {@code collect}, and built-in support for
* parallelizing stream operations.
* <li>{@code FluentIterable} contains several features not available on {@code Stream}, which are
* noted in the method descriptions below.
* <li>Streams include primitive-specialized variants such as {@code IntStream}, the use of which
* is strongly recommended.
* <li>Streams are standard Java, not requiring a third-party dependency.
* </ul>
*
* <h3>Example</h3>
*
* <p>Here is an example that accepts a list from a database call, filters it based on a predicate,
* transforms it by invoking {@code toString()} on each element, and returns the first 10 elements
* as a {@code List}:
*
* {@snippet :
* ImmutableList<String> results =
* FluentIterable.from(database.getClientList())
* .filter(Client::isActiveInLastMonth)
* .transform(Object::toString)
* .limit(10)
* .toList();
* }
*
* The approximate stream equivalent is:
*
* {@snippet :
* List<String> results =
* database.getClientList()
* .stream()
* .filter(Client::isActiveInLastMonth)
* .map(Object::toString)
* .limit(10)
* .collect(Collectors.toList());
* }
*
* @author Marcin Mikosik
* @since 12.0
*/
@GwtCompatible
public abstract class FluentIterable<E extends @Nullable Object> implements Iterable<E> {
// We store 'iterable' and use it instead of 'this' to allow Iterables to perform instanceof
// checks on the _original_ iterable when FluentIterable.from is used.
// To avoid a self retain cycle under j2objc, we store Optional.absent() instead of
// Optional.of(this). To access the delegate iterable, call #getDelegate(), which converts to
// absent() back to 'this'.
private final Optional<Iterable<E>> iterableDelegate;

/** Constructor for use by subclasses. */
protected FluentIterable() {
  this.iterableDelegate = Optional.absent();
}

/** Wraps {@code iterable} as the delegate that {@link #getDelegate} will return. */
FluentIterable(Iterable<E> iterable) {
  this.iterableDelegate = Optional.of(iterable);
}

/** Returns the wrapped iterable, or {@code this} when the no-arg constructor was used. */
private Iterable<E> getDelegate() {
  return iterableDelegate.or(this);
}
/**
 * Returns a fluent iterable that wraps {@code iterable}, or {@code iterable} itself if it is
 * already a {@code FluentIterable}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Collection#stream} if {@code iterable} is a {@link
 * Collection}; {@link Streams#stream(Iterable)} otherwise.
 */
public static <E extends @Nullable Object> FluentIterable<E> from(Iterable<E> iterable) {
  // Avoid an extra wrapper when the input is already fluent.
  if (iterable instanceof FluentIterable) {
    return (FluentIterable<E>) iterable;
  }
  return new FluentIterable<E>(iterable) {
    @Override
    public Iterator<E> iterator() {
      return iterable.iterator();
    }
  };
}
/**
 * Returns a fluent iterable containing {@code elements} in the specified order.
 *
 * <p>The returned iterable is an unmodifiable view of the input array.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link java.util.stream.Stream#of(Object[])
 * Stream.of(T...)}.
 *
 * @since 20.0 (since 18.0 as an overload of {@code of})
 */
public static <E extends @Nullable Object> FluentIterable<E> from(E[] elements) {
  // Arrays.asList is a fixed-size view; FluentIterable exposes only iteration, so the
  // backing array cannot be mutated through the result.
  return from(Arrays.asList(elements));
}

/**
 * Construct a fluent iterable from another fluent iterable. This is obviously never necessary,
 * but is intended to help call out cases where one migration from {@code Iterable} to {@code
 * FluentIterable} has obviated the need to explicitly convert to a {@code FluentIterable}.
 *
 * @deprecated instances of {@code FluentIterable} don't need to be converted to {@code
 *     FluentIterable}
 */
@Deprecated
@InlineMe(
    replacement = "checkNotNull(iterable)",
    staticImports = {"com.google.common.base.Preconditions.checkNotNull"})
public static <E extends @Nullable Object> FluentIterable<E> from(FluentIterable<E> iterable) {
  return checkNotNull(iterable);
}
/**
 * Returns a fluent iterable that combines two iterables. The returned iterable has an iterator
 * that traverses the elements in {@code a}, followed by the elements in {@code b}. The source
 * iterators are not polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#concat}.
 *
 * @since 20.0
 */
public static <T extends @Nullable Object> FluentIterable<T> concat(
    Iterable<? extends T> a, Iterable<? extends T> b) {
  // The varargs array is freshly allocated by this call, so no defensive copy is needed.
  return concatNoDefensiveCopy(a, b);
}

/**
 * Returns a fluent iterable that combines three iterables. The returned iterable has an iterator
 * that traverses the elements in {@code a}, followed by the elements in {@code b}, followed by
 * the elements in {@code c}. The source iterators are not polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>{@code Stream} equivalent:</b> use nested calls to {@link Stream#concat}, or see the
 * advice in {@link #concat(Iterable...)}.
 *
 * @since 20.0
 */
public static <T extends @Nullable Object> FluentIterable<T> concat(
    Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c) {
  // The varargs array is freshly allocated by this call, so no defensive copy is needed.
  return concatNoDefensiveCopy(a, b, c);
}

/**
 * Returns a fluent iterable that combines four iterables. The returned iterable has an iterator
 * that traverses the elements in {@code a}, followed by the elements in {@code b}, followed by
 * the elements in {@code c}, followed by the elements in {@code d}. The source iterators are not
 * polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>{@code Stream} equivalent:</b> use nested calls to {@link Stream#concat}, or see the
 * advice in {@link #concat(Iterable...)}.
 *
 * @since 20.0
 */
public static <T extends @Nullable Object> FluentIterable<T> concat(
    Iterable<? extends T> a,
    Iterable<? extends T> b,
    Iterable<? extends T> c,
    Iterable<? extends T> d) {
  // The varargs array is freshly allocated by this call, so no defensive copy is needed.
  return concatNoDefensiveCopy(a, b, c, d);
}

/**
 * Returns a fluent iterable that combines several iterables. The returned iterable has an
 * iterator that traverses the elements of each iterable in {@code inputs}. The input iterators
 * are not polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it.
 *
 * <p><b>{@code Stream} equivalent:</b> to concatenate an arbitrary number of streams, use {@code
 * Stream.of(stream1, stream2, ...).flatMap(s -> s)}. If the sources are iterables, use {@code
 * Stream.of(iter1, iter2, ...).flatMap(Streams::stream)}.
 *
 * @throws NullPointerException if any of the provided iterables is {@code null}
 * @since 20.0
 */
@SafeVarargs
public static <T extends @Nullable Object> FluentIterable<T> concat(
    Iterable<? extends T>... inputs) {
  // Defensive copy: the caller retains the varargs array and could mutate it later.
  return concatNoDefensiveCopy(Arrays.copyOf(inputs, inputs.length));
}

/**
 * Returns a fluent iterable that combines several iterables. The returned iterable has an
 * iterator that traverses the elements of each iterable in {@code inputs}. The input iterators
 * are not polled until necessary.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
 * iterator supports it. The methods of the returned iterable may throw {@code
 * NullPointerException} if any of the input iterators is {@code null}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code streamOfStreams.flatMap(s -> s)} or {@code
 * streamOfIterables.flatMap(Streams::stream)}. (See {@link Streams#stream}.)
 *
 * @since 20.0
 */
public static <T extends @Nullable Object> FluentIterable<T> concat(
    Iterable<? extends Iterable<? extends T>> inputs) {
  checkNotNull(inputs);
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      return Iterators.concat(Iterators.transform(inputs.iterator(), Iterable::iterator));
    }
  };
}

/** Concatenates a varargs array of iterables without making a defensive copy of the array. */
private static <T extends @Nullable Object> FluentIterable<T> concatNoDefensiveCopy(
    Iterable<? extends T>... inputs) {
  // Null-check eagerly so failures surface at concat() time rather than during iteration.
  for (Iterable<? extends T> input : inputs) {
    checkNotNull(input);
  }
  return new FluentIterable<T>() {
    @Override
    public Iterator<T> iterator() {
      return Iterators.concat(
          /* lazily generate the iterators on each input only as needed */
          new AbstractIndexedListIterator<Iterator<? extends T>>(inputs.length) {
            @Override
            public Iterator<? extends T> get(int i) {
              return inputs[i].iterator();
            }
          });
    }
  };
}

/**
 * Returns a fluent iterable containing no elements.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#empty}.
 *
 * @since 20.0
 */
@SuppressWarnings("EmptyList") // ImmutableList doesn't support nullable element types
public static <E extends @Nullable Object> FluentIterable<E> of() {
  return FluentIterable.from(Collections.emptyList());
}

/**
 * Returns a fluent iterable containing the specified elements in order.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link java.util.stream.Stream#of(Object[])
 * Stream.of(T...)}.
 *
 * @since 20.0
 */
public static <E extends @Nullable Object> FluentIterable<E> of(
    @ParametricNullness E element, E... elements) {
  return from(Lists.asList(element, elements));
}
/**
 * Returns a string representation of this fluent iterable, with the format {@code [e1, e2, ...,
 * en]}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.collect(Collectors.joining(", ", "[", "]"))}
 * or (less efficiently) {@code stream.collect(Collectors.toList()).toString()}.
 */
@Override
public String toString() {
  return Iterables.toString(getDelegate());
}

/**
 * Returns the number of elements in this fluent iterable.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#count}.
 */
public final int size() {
  return Iterables.size(getDelegate());
}

/**
 * Returns {@code true} if this fluent iterable contains any object for which {@code
 * equals(target)} is true.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.anyMatch(Predicate.isEqual(target))}.
 */
public final boolean contains(@Nullable Object target) {
  return Iterables.contains(getDelegate(), target);
}

/**
 * Returns a fluent iterable whose {@code Iterator} cycles indefinitely over the elements of this
 * fluent iterable.
 *
 * <p>That iterator supports {@code remove()} if {@code iterable.iterator()} does. After {@code
 * remove()} is called, subsequent cycles omit the removed element, which is no longer in this
 * fluent iterable. The iterator's {@code hasNext()} method returns {@code true} until this fluent
 * iterable is empty.
 *
 * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You
 * should use an explicit {@code break} or be certain that you will eventually remove all the
 * elements.
 *
 * <p><b>{@code Stream} equivalent:</b> if the source iterable has only a single element {@code
 * e}, use {@code Stream.generate(() -> e)}. Otherwise, collect your stream into a collection and
 * use {@code Stream.generate(() -> collection).flatMap(Collection::stream)}.
 */
public final FluentIterable<E> cycle() {
  return from(Iterables.cycle(getDelegate()));
}
/**
 * Returns a fluent iterable whose iterators traverse first the elements of this fluent iterable,
 * followed by those of {@code other}. The iterators are not polled until necessary.
 *
 * <p>The returned iterable's {@code Iterator} supports {@code remove()} when the corresponding
 * {@code Iterator} supports it.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#concat}.
 *
 * @since 18.0
 */
public final FluentIterable<E> append(Iterable<? extends E> other) {
  return FluentIterable.concat(getDelegate(), other);
}

/**
 * Returns a fluent iterable whose iterators traverse first the elements of this fluent iterable,
 * followed by {@code elements}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code Stream.concat(thisStream, Stream.of(elements))}.
 *
 * @since 18.0
 */
public final FluentIterable<E> append(E... elements) {
  return FluentIterable.concat(getDelegate(), Arrays.asList(elements));
}
/**
 * Returns the elements from this fluent iterable that satisfy a predicate. The resulting fluent
 * iterable's iterator does not support {@code remove()}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#filter} (same).
 */
public final FluentIterable<E> filter(Predicate<? super E> predicate) {
  return from(Iterables.filter(getDelegate(), predicate));
}

/**
 * Returns the elements from this fluent iterable that are instances of class {@code type}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(type::isInstance).map(type::cast)}.
 * This does perform a little more work than necessary, so another option is to insert an
 * unchecked cast at some later point:
 *
 * {@snippet :
 * @SuppressWarnings("unchecked") // safe because of ::isInstance check
 * ImmutableList<NewType> result =
 *     (ImmutableList) stream.filter(NewType.class::isInstance).collect(toImmutableList());
 * }
 */
@GwtIncompatible // Class.isInstance
public final <T> FluentIterable<T> filter(Class<T> type) {
  return from(Iterables.filter(getDelegate(), type));
}

/**
 * Returns {@code true} if any element in this fluent iterable satisfies the predicate.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#anyMatch} (same).
 */
public final boolean anyMatch(Predicate<? super E> predicate) {
  return Iterables.any(getDelegate(), predicate);
}

/**
 * Returns {@code true} if every element in this fluent iterable satisfies the predicate. If this
 * fluent iterable is empty, {@code true} is returned.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#allMatch} (same).
 */
public final boolean allMatch(Predicate<? super E> predicate) {
  return Iterables.all(getDelegate(), predicate);
}

/**
 * Returns an {@link Optional} containing the first element in this fluent iterable that satisfies
 * the given predicate, if such an element exists.
 *
 * <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code null}. If {@code null}
 * is matched in this fluent iterable, a {@link NullPointerException} will be thrown.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(predicate).findFirst()}.
 */
public final Optional<@NonNull E> firstMatch(Predicate<? super E> predicate) {
  // Unsafe, but we can't do much about it now.
  return Iterables.<@NonNull E>tryFind((Iterable<@NonNull E>) getDelegate(), predicate);
}
/**
* Returns a fluent iterable that applies {@code function} to each element of this fluent
* iterable.
*
* <p>The returned fluent iterable's iterator supports {@code remove()} if this iterable's
* iterator does. After a successful {@code remove()} call, this fluent iterable no longer
* contains the corresponding element.
*
* <p><b>{@code Stream} equivalent:</b> {@link Stream#map}.
*/
public final <T extends @Nullable Object> FluentIterable<T> transform(
Function<? super E, T> function) {
return from(Iterables.transform(getDelegate(), function));
}
/**
 * Applies {@code function} — which yields an {@code Iterable} of results per element — to every
 * element of this fluent iterable, and returns a fluent iterable over the concatenation of all
 * those results.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when the function-returned
 * iterables' iterators do; a successful {@code remove()} drops the corresponding element.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#flatMap} (using a function that produces
 * streams, not iterables).
 *
 * @since 13.0 (required {@code Function<E, Iterable<T>>} until 14.0)
 */
public <T extends @Nullable Object> FluentIterable<T> transformAndConcat(
    Function<? super E, ? extends Iterable<? extends T>> function) {
  FluentIterable<? extends Iterable<? extends T>> perElement = transform(function);
  return FluentIterable.concat(perElement);
}
/**
 * Returns an {@link Optional} containing the first element of this fluent iterable, or {@code
 * Optional.absent()} when it is empty.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#findAny} when any element will do; {@code
 * Stream#findFirst} when it must specifically be the first.
 *
 * @throws NullPointerException if the first element is null; in that case prefer {@code
 *     iterator().next()} or {@link Iterables#getFirst}.
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final Optional<@NonNull E> first() {
  Iterator<E> iterator = getDelegate().iterator();
  if (!iterator.hasNext()) {
    return Optional.absent();
  }
  return Optional.of(iterator.next());
}
/**
 * Returns an {@link Optional} containing the last element of this fluent iterable, or {@code
 * Optional.absent()} when it is empty. When the underlying iterable is a {@link List} with
 * {@link java.util.RandomAccess} support, this runs in {@code O(1)}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.reduce((a, b) -> b)}.
 *
 * @throws NullPointerException if the last element is null; in that case prefer {@link
 *     Iterables#getLast}.
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final Optional<@NonNull E> last() {
  // Inlined from Iterables#getLast so we don't have to throw/catch a NoSuchElementException.
  // TODO(kevinb): Support a concurrently modified collection?
  Iterable<E> iterable = getDelegate();
  // Fast path: random access through List.
  if (iterable instanceof List) {
    List<E> list = (List<E>) iterable;
    if (list.isEmpty()) {
      return Optional.absent();
    }
    return Optional.of(list.get(list.size() - 1));
  }
  Iterator<E> iterator = iterable.iterator();
  if (!iterator.hasNext()) {
    return Optional.absent();
  }
  // Fast path for SortedSet. Arguably of limited value: SortedSet users tend to know they
  // have one and would call last() on the set directly.
  if (iterable instanceof SortedSet) {
    SortedSet<E> sortedSet = (SortedSet<E>) iterable;
    return Optional.of(sortedSet.last());
  }
  // General case: walk the iterator to its final element.
  E current = iterator.next();
  while (iterator.hasNext()) {
    current = iterator.next();
  }
  return Optional.of(current);
}
/**
 * Returns a view of this fluent iterable that omits its first {@code numberToSkip} elements, or
 * all of them when it contains fewer than {@code numberToSkip}.
 *
 * <p>Modifications made before calling {@code iterator()} are reflected in the view: the
 * skipped elements are those present when the iterator is created, not when {@code skip()} is
 * called.
 *
 * <p>The returned iterable's iterator supports {@code remove()} when this iterable's iterator
 * does. Note that the last skipped element cannot be deleted by an immediate {@code remove()}
 * on the returned iterator: per the {@code Iterator} contract, {@code remove()} before {@code
 * next()} throws an {@link IllegalStateException}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#skip} (same).
 */
public final FluentIterable<E> skip(int numberToSkip) {
  Iterable<E> view = Iterables.skip(getDelegate(), numberToSkip);
  return from(view);
}
/**
 * Creates a fluent iterable holding at most the first {@code maxSize} elements of this one. If
 * this iterable has fewer elements, the result behaves exactly like this iterable. The returned
 * iterable's iterator supports {@code remove()} when this one's does.
 *
 * <p><b>{@code Stream} equivalent:</b> {@link Stream#limit} (same).
 *
 * @param maxSize the maximum number of elements in the returned fluent iterable
 * @throws IllegalArgumentException if {@code maxSize} is negative
 */
public final FluentIterable<E> limit(int maxSize) {
  Iterable<E> view = Iterables.limit(getDelegate(), maxSize);
  return from(view);
}
/**
 * Determines whether this fluent iterable contains no elements.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code !stream.findAny().isPresent()}.
 */
public final boolean isEmpty() {
  Iterator<E> iterator = getDelegate().iterator();
  return !iterator.hasNext();
}
/**
 * Returns an {@code ImmutableList} of all elements of this fluent iterable in iteration order.
 *
 * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableList#toImmutableList} to {@code
 * stream.collect()}.
 *
 * @throws NullPointerException if any element is {@code null}
 * @since 14.0 (since 12.0 as {@code toImmutableList()}).
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableList<@NonNull E> toList() {
  Iterable<@NonNull E> elements = (Iterable<@NonNull E>) getDelegate();
  return ImmutableList.copyOf(elements);
}
/**
 * Returns an {@code ImmutableList} of this iterable's elements ordered by {@code comparator}.
 * For the natural ordering, use {@code toSortedList(Ordering.natural())}.
 *
 * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableList#toImmutableList} to {@code
 * stream.sorted(comparator).collect()}.
 *
 * @param comparator the function by which to sort list elements
 * @throws NullPointerException if any element of this iterable is {@code null}
 * @since 14.0 (since 13.0 as {@code toSortedImmutableList()}).
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableList<@NonNull E> toSortedList(Comparator<? super E> comparator) {
  Iterable<@NonNull E> elements = (Iterable<@NonNull E>) getDelegate();
  return Ordering.from(comparator).immutableSortedCopy(elements);
}
/**
 * Returns an {@code ImmutableSet} of this fluent iterable's elements with duplicates removed.
 *
 * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableSet#toImmutableSet} to {@code
 * stream.collect()}.
 *
 * @throws NullPointerException if any element is {@code null}
 * @since 14.0 (since 12.0 as {@code toImmutableSet()}).
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableSet<@NonNull E> toSet() {
  Iterable<@NonNull E> elements = (Iterable<@NonNull E>) getDelegate();
  return ImmutableSet.copyOf(elements);
}
/**
 * Returns an {@code ImmutableSortedSet} of this {@code FluentIterable}'s elements in the order
 * given by {@code comparator}, dropping duplicates (elements for which {@code
 * comparator.compare(x, y) == 0}). For the natural ordering, use {@code
 * toSortedSet(Ordering.natural())}.
 *
 * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableSortedSet#toImmutableSortedSet} to
 * {@code stream.collect()}.
 *
 * @param comparator the function by which to sort set elements
 * @throws NullPointerException if any element of this iterable is {@code null}
 * @since 14.0 (since 12.0 as {@code toImmutableSortedSet()}).
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableSortedSet<@NonNull E> toSortedSet(Comparator<? super E> comparator) {
  Iterable<@NonNull E> elements = (Iterable<@NonNull E>) getDelegate();
  return ImmutableSortedSet.copyOf(comparator, elements);
}
/**
 * Returns an {@code ImmutableMultiset} of all elements of this fluent iterable.
 *
 * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableMultiset#toImmutableMultiset} to
 * {@code stream.collect()}.
 *
 * @throws NullPointerException if any element is null
 * @since 19.0
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableMultiset<@NonNull E> toMultiset() {
  Iterable<@NonNull E> elements = (Iterable<@NonNull E>) getDelegate();
  return ImmutableMultiset.copyOf(elements);
}
/**
 * Returns an immutable map keyed by the distinct elements of this {@code FluentIterable}, each
 * mapped to the result of {@code valueFunction} for that element. Iteration order follows the
 * first appearance of each key in this iterable.
 *
 * <p>When a key occurs multiple times, it is unspecified which occurrence(s) {@code
 * valueFunction} is applied to, and which result ends up in the map.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.collect(ImmutableMap.toImmutableMap(k ->
 * k, valueFunction))}.
 *
 * @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
 *     valueFunction} produces {@code null} for any key
 * @since 14.0
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final <V> ImmutableMap<@NonNull E, V> toMap(Function<? super E, V> valueFunction) {
  Iterable<@NonNull E> keys = (Iterable<@NonNull E>) getDelegate();
  return Maps.toMap(keys, valueFunction);
}
/**
 * Creates an {@code ImmutableListMultimap} indexing every element of this {@code
 * FluentIterable} under the key produced by applying {@code keyFunction} to it. The multimap
 * has the same size as this iterable, is an immutable snapshot, and preserves encounter order
 * for both keys and the values under each key.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code
 * stream.collect(ImmutableListMultimap.toImmutableListMultimap(keyFunction, v -> v))}.
 *
 * @param keyFunction the function used to produce the key for each value
 * @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
 *     keyFunction} produces {@code null} for any key
 * @since 14.0
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final <K> ImmutableListMultimap<K, @NonNull E> index(Function<? super E, K> keyFunction) {
  Iterable<@NonNull E> values = (Iterable<@NonNull E>) getDelegate();
  return Multimaps.index(values, keyFunction);
}
/**
 * Returns a map whose {@code values} are the contents of this {@code FluentIterable}, each
 * indexed by the key {@code keyFunction} computes from it; entries appear in this iterable's
 * order. For example, indexing colors by {@code toStringFunction()} maps {@code "red"} to the
 * {@code red} color object.
 *
 * <p>If your index may associate multiple values with each key, use {@link #index(Function)
 * index} instead.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code
 * stream.collect(ImmutableMap.toImmutableMap(keyFunction, v -> v))}.
 *
 * @param keyFunction the function used to produce the key for each value
 * @return a map mapping the result of evaluating {@code keyFunction} on each value in this
 *     fluent iterable to that value
 * @throws IllegalArgumentException if {@code keyFunction} produces the same key for more than
 *     one value in this fluent iterable
 * @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
 *     keyFunction} produces {@code null} for any key
 * @since 14.0
 */
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final <K> ImmutableMap<K, @NonNull E> uniqueIndex(Function<? super E, K> keyFunction) {
  Iterable<@NonNull E> values = (Iterable<@NonNull E>) getDelegate();
  return Maps.uniqueIndex(values, keyFunction);
}
/**
 * Returns a newly allocated array containing all elements of this fluent iterable in iteration
 * order.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.toArray()} when an object array is
 * acceptable; {@code stream.toArray(MyType[]::new)} when {@code type} is a class literal; else
 * {@code stream.toArray(len -> (E[]) Array.newInstance(type, len))}.
 *
 * @param type the type of the elements
 * @return a newly-allocated array into which all the elements of this fluent iterable have
 *     been copied
 */
@GwtIncompatible // Array.newArray(Class, int)
public final E[] toArray(Class<@NonNull E> type) {
  Iterable<E> elements = getDelegate();
  return Iterables.<E>toArray(elements, type);
}
/**
 * Copies all elements of this fluent iterable into {@code collection}; equivalent to calling
 * {@code Iterables.addAll(collection, this)}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.forEachOrdered(collection::add)} or
 * {@code stream.forEach(collection::add)}.
 *
 * @param collection the collection to copy elements to
 * @return {@code collection}, for convenience
 * @since 14.0
 */
@CanIgnoreReturnValue
public final <C extends Collection<? super E>> C copyInto(C collection) {
  checkNotNull(collection);
  Iterable<E> source = getDelegate();
  if (source instanceof Collection) {
    // Bulk add lets the target collection optimize.
    collection.addAll((Collection<E>) source);
    return collection;
  }
  for (E element : source) {
    collection.add(element);
  }
  return collection;
}
/**
 * Returns a {@link String} of all elements of this fluent iterable joined with {@code joiner}.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code joiner.join(stream.iterator())}, or, when no
 * optional {@code Joiner} features are used, {@code
 * stream.collect(Collectors.joining(delimiter)}.
 *
 * @since 18.0
 */
public final String join(Joiner joiner) {
  String joined = joiner.join(this);
  return joined;
}
/**
 * Returns the element at {@code position} in this fluent iterable.
 *
 * <p><b>{@code Stream} equivalent:</b> {@code stream.skip(position).findFirst().get()} (note
 * the differing exception types, and that it throws if {@code null} would be returned).
 *
 * @param position position of the element to return
 * @return the element at the specified position in this fluent iterable
 * @throws IndexOutOfBoundsException if {@code position} is negative or not less than the size
 *     of this fluent iterable
 */
@ParametricNullness
public final E get(int position) {
  Iterable<E> elements = getDelegate();
  return Iterables.get(elements, position);
}
/**
 * Returns a stream over this fluent iterable's contents, similar to calling {@link
 * Collection#stream} on a collection.
 *
 * <p><b>Note:</b> switching to {@code Stream} earlier in the chain (ideally not going through
 * {@code FluentIterable} at all) is more performant and idiomatic; treat this method as a
 * transitional aid, to be used only when really necessary.
 *
 * @since 21.0
 */
public final Stream<E> stream() {
  Iterable<E> elements = getDelegate();
  return Streams.stream(elements);
}
}
|
apache/phoenix-connectors | 35,330 | phoenix5-hive/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.hive.util;
import java.lang.reflect.Field;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
/**
* TypeInfoUtils.
*
*/
public final class TypeInfoUtils {
// Numeric categories in ascending "width" order; populated via registerNumericType below.
public static List<PrimitiveCategory> numericTypeList = new ArrayList<PrimitiveCategory>();
// The ordering of types here is used to determine which numeric types
// are common/convertible to one another. Probably better to rely on the
// ordering explicitly defined here than to assume that the enum values
// that were arbitrarily assigned in PrimitiveCategory work for our purposes.
public static EnumMap<PrimitiveCategory, Integer> numericTypes =
    new EnumMap<PrimitiveCategory, Integer>(PrimitiveCategory.class);
static {
  // Rank 1 (narrowest) through 8 (widest); STRING last so anything converts to it.
  registerNumericType(PrimitiveCategory.BYTE, 1);
  registerNumericType(PrimitiveCategory.SHORT, 2);
  registerNumericType(PrimitiveCategory.INT, 3);
  registerNumericType(PrimitiveCategory.LONG, 4);
  registerNumericType(PrimitiveCategory.FLOAT, 5);
  registerNumericType(PrimitiveCategory.DOUBLE, 6);
  registerNumericType(PrimitiveCategory.DECIMAL, 7);
  registerNumericType(PrimitiveCategory.STRING, 8);
}
// Utility class: not meant to be instantiated.
private TypeInfoUtils() {
  // prevent instantiation
}
/**
 * Return the extended TypeInfo from a Java type. By "extended" TypeInfo, we additionally allow
 * {@code unknownTypeInfo} for {@code java.lang.Object}. Parameterized List/Map types recurse
 * into their type arguments; any other class is treated as a struct of its non-static fields.
 *
 * @param t
 *          The Java type.
 * @param m
 *          The method, only used for generating error messages.
 */
private static TypeInfo getExtendedTypeInfoFromJavaType(Type t, Method m) {
  if (t == Object.class) {
    return TypeInfoFactory.unknownTypeInfo;
  }
  if (t instanceof ParameterizedType) {
    ParameterizedType pt = (ParameterizedType) t;
    // List? Recurse on the single element type argument.
    if (List.class == (Class<?>) pt.getRawType()
        || ArrayList.class == (Class<?>) pt.getRawType()) {
      return TypeInfoFactory.getListTypeInfo(getExtendedTypeInfoFromJavaType(
          pt.getActualTypeArguments()[0], m));
    }
    // Map? Recurse on the key and value type arguments.
    if (Map.class == (Class<?>) pt.getRawType()
        || HashMap.class == (Class<?>) pt.getRawType()) {
      return TypeInfoFactory.getMapTypeInfo(getExtendedTypeInfoFromJavaType(
          pt.getActualTypeArguments()[0], m),
          getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[1], m));
    }
    // Otherwise convert t to RawType so we will fall into the following if
    // block.
    t = pt.getRawType();
  }
  // Must be a class from here on; anything else is unsupported.
  if (!(t instanceof Class)) {
    throw new RuntimeException("Hive does not understand type " + t
        + " from " + m);
  }
  Class<?> c = (Class<?>) t;
  // Java Primitive Type (int, boolean, ...)?
  if (PrimitiveObjectInspectorUtils.isPrimitiveJavaType(c)) {
    return TypeInfoUtils
        .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory
            .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils
                .getTypeEntryFromPrimitiveJavaType(c).primitiveCategory));
  }
  // Java Primitive wrapper Class (Integer, Boolean, ...)?
  if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(c)) {
    return TypeInfoUtils
        .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory
            .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils
                .getTypeEntryFromPrimitiveJavaClass(c).primitiveCategory));
  }
  // Primitive Writable class (IntWritable, Text, ...)?
  if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(c)) {
    return TypeInfoUtils
        .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory
            .getPrimitiveWritableObjectInspector(PrimitiveObjectInspectorUtils
                .getTypeEntryFromPrimitiveWritableClass(c).primitiveCategory));
  }
  // Must be a struct: map each declared non-static field to a struct field, recursing
  // into each field's generic type.
  Field[] fields = ObjectInspectorUtils.getDeclaredNonStaticFields(c);
  ArrayList<String> fieldNames = new ArrayList<String>(fields.length);
  ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.length);
  for (Field field : fields) {
    fieldNames.add(field.getName());
    fieldTypeInfos.add(getExtendedTypeInfoFromJavaType(
        field.getGenericType(), m));
  }
  return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
}
/**
 * Returns the array element type, if the Type is an array ({@code Object[]}) or a
 * GenericArrayType ({@code Map<String,String>[]}); otherwise returns null.
 */
public static Type getArrayElementType(Type t) {
  if (t instanceof Class) {
    Class<?> clazz = (Class<?>) t;
    return clazz.isArray() ? clazz.getComponentType() : null;
  }
  if (t instanceof GenericArrayType) {
    return ((GenericArrayType) t).getGenericComponentType();
  }
  return null;
}
/**
 * Get the parameter TypeInfo for a method.
 *
 * @param size In case the last parameter of Method is an array, we will try to return a
 *          {@code List<TypeInfo>} with the specified size by repeating the element of the
 *          array at the end. In case the size is smaller than the minimum possible number of
 *          arguments for the method, null will be returned.
 */
public static List<TypeInfo> getParameterTypeInfos(Method m, int size) {
  Type[] paramTypes = m.getGenericParameterTypes();
  // When the final formal parameter is an array (Object[], String[], ...), treat the method
  // as variable-length: the array's element type may be repeated to reach "size".
  Type varargElementType = TypeInfoUtils
      .getArrayElementType(paramTypes.length == 0 ? null
          : paramTypes[paramTypes.length - 1]);
  if (varargElementType == null) {
    // Fixed arity: the requested size must match the declared parameter count exactly.
    if (size != paramTypes.length) {
      return null;
    }
    List<TypeInfo> result = new ArrayList<TypeInfo>(paramTypes.length);
    for (Type paramType : paramTypes) {
      result.add(getExtendedTypeInfoFromJavaType(paramType, m));
    }
    return result;
  }
  // Variable arity: every fixed parameter must be supplied; the tail repeats the element type.
  if (size < paramTypes.length - 1) {
    return null;
  }
  List<TypeInfo> result = new ArrayList<TypeInfo>(size);
  for (int i = 0; i < paramTypes.length - 1; i++) {
    result.add(getExtendedTypeInfoFromJavaType(paramTypes[i], m));
  }
  for (int i = paramTypes.length - 1; i < size; i++) {
    result.add(getExtendedTypeInfoFromJavaType(varargElementType, m));
  }
  return result;
}
/**
 * Returns whether the given type name carries type parameters, i.e. contains a '(' as in
 * {@code varchar(10)} or {@code decimal(10,2)}.
 *
 * @param typeName the Hive type name to inspect
 * @return true when the name contains a parameter-list opener
 */
public static boolean hasParameters(String typeName) {
  // An if/else that returns the comparison's own value is redundant; return it directly.
  return typeName.indexOf('(') != -1;
}
/**
 * Returns the type name with any parameter list stripped: everything from the first '(' on is
 * dropped, so {@code "decimal(10,2)"} becomes {@code "decimal"}; an unparameterized name is
 * returned unchanged.
 */
public static String getBaseName(String typeName) {
  int parenIndex = typeName.indexOf('(');
  return parenIndex == -1 ? typeName : typeName.substring(0, parenIndex);
}
/**
 * Returns true if both TypeInfos are of primitive type and their primitive categories match.
 *
 * @param ti1 first type to compare
 * @param ti2 second type to compare
 * @return true only when both are primitive with the same category
 */
public static boolean doPrimitiveCategoriesMatch(TypeInfo ti1, TypeInfo ti2) {
  if (ti1.getCategory() != Category.PRIMITIVE || ti2.getCategory() != Category.PRIMITIVE) {
    return false;
  }
  return ((PrimitiveTypeInfo) ti1).getPrimitiveCategory()
      == ((PrimitiveTypeInfo) ti2).getPrimitiveCategory();
}
/**
* Parse a recursive TypeInfo list String. For example, the following inputs
* are valid inputs:
* "int,string,map<string,int>,list<map<int,list<string>>>,list<struct<a:int,b:string>>"
* The separators between TypeInfos can be ",", ":", or ";".
*
* In order to use this class: TypeInfoParser parser = new
* TypeInfoParser("int,string"); ArrayList<TypeInfo> typeInfos =
* parser.parseTypeInfos();
*/
private static class TypeInfoParser {
private static class Token {
public int position;
public String text;
public boolean isType;
@Override
public String toString() {
return "" + position + ":" + text;
}
};
private static boolean isTypeChar(char c) {
return Character.isLetterOrDigit(c) || c == '_' || c == '.' || c == ' ' || c == '$';
}
/**
* Tokenize the typeInfoString. The rule is simple: all consecutive
* alphadigits and '_', '.' are in one token, and all other characters are
* one character per token.
*
* tokenize("map<int,string>") should return
* ["map","<","int",",","string",">"]
*
* Note that we add '$' in new Calcite return path. As '$' will not appear
* in any type in Hive, it is safe to do so.
*/
private static ArrayList<Token> tokenize(String typeInfoString) {
ArrayList<Token> tokens = new ArrayList<Token>(0);
int begin = 0;
int end = 1;
while (end <= typeInfoString.length()) {
// last character ends a token?
if (end == typeInfoString.length()
|| !isTypeChar(typeInfoString.charAt(end - 1))
|| !isTypeChar(typeInfoString.charAt(end))) {
Token t = new Token();
t.position = begin;
t.text = typeInfoString.substring(begin, end);
t.isType = isTypeChar(typeInfoString.charAt(begin));
tokens.add(t);
begin = end;
}
end++;
}
return tokens;
}
public TypeInfoParser(String typeInfoString) {
this.typeInfoString = typeInfoString;
typeInfoTokens = tokenize(typeInfoString);
}
private final String typeInfoString;
private final ArrayList<Token> typeInfoTokens;
private ArrayList<TypeInfo> typeInfos;
private int iToken;
public ArrayList<TypeInfo> parseTypeInfos() {
typeInfos = new ArrayList<TypeInfo>();
iToken = 0;
while (iToken < typeInfoTokens.size()) {
typeInfos.add(parseType());
if (iToken < typeInfoTokens.size()) {
Token separator = typeInfoTokens.get(iToken);
if (",".equals(separator.text) || ";".equals(separator.text)
|| ":".equals(separator.text)) {
iToken++;
} else {
throw new IllegalArgumentException(
"Error: ',', ':', or ';' expected at position "
+ separator.position + " from '" + typeInfoString + "' "
+ typeInfoTokens);
}
}
}
return typeInfos;
}
private Token peek() {
if (iToken < typeInfoTokens.size()) {
return typeInfoTokens.get(iToken);
} else {
return null;
}
}
private Token expect(String item) {
return expect(item, null);
}
private Token expect(String item, String alternative) {
if (iToken >= typeInfoTokens.size()) {
throw new IllegalArgumentException("Error: " + item
+ " expected at the end of '" + typeInfoString + "'");
}
Token t = typeInfoTokens.get(iToken);
if (item.equals("type")) {
if (!serdeConstants.LIST_TYPE_NAME.equals(t.text)
&& !serdeConstants.MAP_TYPE_NAME.equals(t.text)
&& !serdeConstants.STRUCT_TYPE_NAME.equals(t.text)
&& !serdeConstants.UNION_TYPE_NAME.equals(t.text)
&& null == PrimitiveObjectInspectorUtils
.getTypeEntryFromTypeName(t.text)
&& !t.text.equals(alternative)) {
throw new IllegalArgumentException("Error: " + item
+ " expected at the position " + t.position + " of '"
+ typeInfoString + "' but '" + t.text + "' is found.");
}
} else if (item.equals("name")) {
if (!t.isType && !t.text.equals(alternative)) {
throw new IllegalArgumentException("Error: " + item
+ " expected at the position " + t.position + " of '"
+ typeInfoString + "' but '" + t.text + "' is found.");
}
} else {
if (!item.equals(t.text) && !t.text.equals(alternative)) {
throw new IllegalArgumentException("Error: " + item
+ " expected at the position " + t.position + " of '"
+ typeInfoString + "' but '" + t.text + "' is found.");
}
}
iToken++;
return t;
}
private String[] parseParams() {
List<String> params = new LinkedList<String>();
Token t = peek();
if (t != null && t.text.equals("(")) {
expect("(");
// checking for null in the for-loop condition prevents null-ptr exception
// and allows us to fail more gracefully with a parsing error.
for(t = peek(); (t == null) || !t.text.equals(")"); t = expect(",",")")) {
params.add(expect("name").text);
}
if (params.size() == 0) {
throw new IllegalArgumentException(
"type parameters expected for type string " + typeInfoString);
}
}
return params.toArray(new String[params.size()]);
}
private TypeInfo parseType() {
Token t = expect("type");
// Is this a primitive type?
PrimitiveTypeEntry typeEntry =
PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(t.text);
if (typeEntry != null && typeEntry.primitiveCategory != PrimitiveCategory.UNKNOWN ) {
String[] params = parseParams();
switch (typeEntry.primitiveCategory) {
case CHAR:
case VARCHAR:
if (params == null || params.length == 0) {
throw new IllegalArgumentException(typeEntry.typeName
+ " type is specified without length: " + typeInfoString);
}
int length = 1;
if (params.length == 1) {
length = Integer.parseInt(params[0]);
if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
BaseCharUtils.validateVarcharParameter(length);
return TypeInfoFactory.getVarcharTypeInfo(length);
} else {
BaseCharUtils.validateCharParameter(length);
return TypeInfoFactory.getCharTypeInfo(length);
}
} else if (params.length > 1) {
throw new IllegalArgumentException(
"Type " + typeEntry.typeName+ " only takes one parameter, but " +
params.length + " is seen");
}
case DECIMAL:
int precision = HiveDecimal.USER_DEFAULT_PRECISION;
int scale = HiveDecimal.USER_DEFAULT_SCALE;
if (params == null || params.length == 0) {
// It's possible that old metadata still refers to "decimal" as a column type w/o
// precision/scale. In this case, the default (10,0) is assumed. Thus, do nothing here.
} else if (params.length == 2) {
// New metadata always have two parameters.
precision = Integer.parseInt(params[0]);
scale = Integer.parseInt(params[1]);
HiveDecimalUtils.validateParameter(precision, scale);
} else if (params.length > 2) {
throw new IllegalArgumentException("Type decimal only takes two parameter, but " +
params.length + " is seen");
}
return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
default:
return TypeInfoFactory.getPrimitiveTypeInfo(typeEntry.typeName);
}
}
// Is this a list type?
if (serdeConstants.LIST_TYPE_NAME.equals(t.text)) {
expect("<");
TypeInfo listElementType = parseType();
expect(">");
return TypeInfoFactory.getListTypeInfo(listElementType);
}
// Is this a map type?
if (serdeConstants.MAP_TYPE_NAME.equals(t.text)) {
expect("<");
TypeInfo mapKeyType = parseType();
expect(",");
TypeInfo mapValueType = parseType();
expect(">");
return TypeInfoFactory.getMapTypeInfo(mapKeyType, mapValueType);
}
// Is this a struct type?
if (serdeConstants.STRUCT_TYPE_NAME.equals(t.text)) {
ArrayList<String> fieldNames = new ArrayList<String>();
ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>();
boolean first = true;
do {
if (first) {
expect("<");
first = false;
} else {
Token separator = expect(">", ",");
if (separator.text.equals(">")) {
// end of struct
break;
}
}
Token name = expect("name",">");
if (name.text.equals(">")) {
break;
}
fieldNames.add(name.text);
expect(":");
fieldTypeInfos.add(parseType());
} while (true);
return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
}
// Is this a union type?
if (serdeConstants.UNION_TYPE_NAME.equals(t.text)) {
List<TypeInfo> objectTypeInfos = new ArrayList<TypeInfo>();
boolean first = true;
do {
if (first) {
expect("<");
first = false;
} else {
Token separator = expect(">", ",");
if (separator.text.equals(">")) {
// end of union
break;
}
}
objectTypeInfos.add(parseType());
} while (true);
return TypeInfoFactory.getUnionTypeInfo(objectTypeInfos);
}
throw new RuntimeException("Internal error parsing position "
+ t.position + " of '" + typeInfoString + "'");
}
/**
 * Parses the leading primitive type token plus any parenthesized type
 * parameters from the current position into a {@link PrimitiveParts} holder.
 */
public PrimitiveParts parsePrimitiveParts() {
  Token typeToken = expect("type");
  PrimitiveParts result = new PrimitiveParts();
  result.typeName = typeToken.text;
  result.typeParams = parseParams();
  return result;
}
}
/**
 * Holder for the pieces of a parsed primitive type: the base type name and
 * its raw type parameters (e.g. precision/scale for decimal, length for
 * char/varchar).
 */
public static class PrimitiveParts {
  // Base primitive type name as written, e.g. "decimal".
  public String typeName;
  // Raw parameter strings from the parenthesized list; presumably empty or
  // null when the type has no parameters — confirm against parseParams().
  public String[] typeParams;
}
/**
 * Make some of the TypeInfo parsing available as a utility: extracts the
 * primitive type name and parameters from the given type string.
 */
public static PrimitiveParts parsePrimitiveParts(String typeInfoString) {
  return new TypeInfoParser(typeInfoString).parsePrimitiveParts();
}
// Cache of standard *writable* object inspectors keyed by TypeInfo.
// Thread-safe; populated lazily by getStandardWritableObjectInspectorFromTypeInfo.
static ConcurrentHashMap<TypeInfo, ObjectInspector> cachedStandardObjectInspector =
    new ConcurrentHashMap<TypeInfo, ObjectInspector>();
/**
 * Returns the standard object inspector that can be used to translate an
 * object of that typeInfo to a standard object type.
 *
 * <p>Inspectors are cached per TypeInfo in {@link #cachedStandardObjectInspector}.
 * Under contention several threads may build an inspector for the same type,
 * but {@code putIfAbsent} guarantees all callers share the first cached instance.
 *
 * @param typeInfo the type to build an inspector for
 * @return a writable-object inspector for {@code typeInfo}
 * @throws RuntimeException if the TypeInfo category is unknown
 */
public static ObjectInspector getStandardWritableObjectInspectorFromTypeInfo(
    TypeInfo typeInfo) {
  ObjectInspector result = cachedStandardObjectInspector.get(typeInfo);
  if (result == null) {
    switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      result = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
          (PrimitiveTypeInfo) typeInfo);
      break;
    }
    case LIST: {
      // Recurse on the element type, then wrap in a list inspector.
      ObjectInspector elementObjectInspector =
          getStandardWritableObjectInspectorFromTypeInfo(
              ((ListTypeInfo) typeInfo).getListElementTypeInfo());
      result = ObjectInspectorFactory
          .getStandardListObjectInspector(elementObjectInspector);
      break;
    }
    case MAP: {
      MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
      ObjectInspector keyObjectInspector =
          getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo());
      ObjectInspector valueObjectInspector =
          getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo());
      result = ObjectInspectorFactory.getStandardMapObjectInspector(
          keyObjectInspector, valueObjectInspector);
      break;
    }
    case STRUCT: {
      StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
      List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
      List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
      List<ObjectInspector> fieldObjectInspectors =
          new ArrayList<ObjectInspector>(fieldTypeInfos.size());
      for (int i = 0; i < fieldTypeInfos.size(); i++) {
        fieldObjectInspectors.add(
            getStandardWritableObjectInspectorFromTypeInfo(fieldTypeInfos.get(i)));
      }
      result = ObjectInspectorFactory.getStandardStructObjectInspector(
          fieldNames, fieldObjectInspectors);
      break;
    }
    case UNION: {
      UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
      List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
      List<ObjectInspector> fieldObjectInspectors =
          new ArrayList<ObjectInspector>(objectTypeInfos.size());
      for (int i = 0; i < objectTypeInfos.size(); i++) {
        fieldObjectInspectors.add(
            getStandardWritableObjectInspectorFromTypeInfo(objectTypeInfos.get(i)));
      }
      result = ObjectInspectorFactory.getStandardUnionObjectInspector(
          fieldObjectInspectors);
      break;
    }
    default:
      // BUG FIX: the old code fell through with result == null, and
      // ConcurrentHashMap.putIfAbsent below rejects null values, producing an
      // uninformative NullPointerException. Fail fast with a clear message.
      throw new RuntimeException(
          "Unknown TypeInfo category: " + typeInfo.getCategory());
    }
    // Keep the first inspector cached for this type so every caller sees the
    // same instance.
    ObjectInspector prev =
        cachedStandardObjectInspector.putIfAbsent(typeInfo, result);
    if (prev != null) {
      result = prev;
    }
  }
  return result;
}
// Cache of standard *Java* (non-writable) object inspectors keyed by TypeInfo.
// Thread-safe; populated lazily by getStandardJavaObjectInspectorFromTypeInfo.
static ConcurrentHashMap<TypeInfo, ObjectInspector> cachedStandardJavaObjectInspector =
    new ConcurrentHashMap<TypeInfo, ObjectInspector>();
/**
 * Returns the standard object inspector that can be used to translate an
 * object of that typeInfo to a standard object type, exposing values as plain
 * Java objects rather than writables.
 *
 * <p>Inspectors are cached per TypeInfo in
 * {@link #cachedStandardJavaObjectInspector}; {@code putIfAbsent} ensures all
 * callers share the first cached instance.
 *
 * @param typeInfo the type to build an inspector for
 * @return a Java-object inspector for {@code typeInfo}
 * @throws RuntimeException if the TypeInfo category is unknown
 */
public static ObjectInspector getStandardJavaObjectInspectorFromTypeInfo(
    TypeInfo typeInfo) {
  ObjectInspector result = cachedStandardJavaObjectInspector.get(typeInfo);
  if (result == null) {
    switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      // NOTE: we use JavaPrimitiveObjectInspector instead of
      // StandardPrimitiveObjectInspector.
      result = PrimitiveObjectInspectorFactory
          .getPrimitiveJavaObjectInspector((PrimitiveTypeInfo) typeInfo);
      break;
    }
    case LIST: {
      ObjectInspector elementObjectInspector =
          getStandardJavaObjectInspectorFromTypeInfo(
              ((ListTypeInfo) typeInfo).getListElementTypeInfo());
      result = ObjectInspectorFactory
          .getStandardListObjectInspector(elementObjectInspector);
      break;
    }
    case MAP: {
      MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
      ObjectInspector keyObjectInspector =
          getStandardJavaObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo());
      ObjectInspector valueObjectInspector =
          getStandardJavaObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo());
      result = ObjectInspectorFactory.getStandardMapObjectInspector(
          keyObjectInspector, valueObjectInspector);
      break;
    }
    case STRUCT: {
      StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
      List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
      List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
      List<ObjectInspector> fieldObjectInspectors =
          new ArrayList<ObjectInspector>(fieldTypeInfos.size());
      for (int i = 0; i < fieldTypeInfos.size(); i++) {
        fieldObjectInspectors.add(
            getStandardJavaObjectInspectorFromTypeInfo(fieldTypeInfos.get(i)));
      }
      result = ObjectInspectorFactory.getStandardStructObjectInspector(
          fieldNames, fieldObjectInspectors);
      break;
    }
    case UNION: {
      UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
      List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
      List<ObjectInspector> fieldObjectInspectors =
          new ArrayList<ObjectInspector>(objectTypeInfos.size());
      for (int i = 0; i < objectTypeInfos.size(); i++) {
        fieldObjectInspectors.add(
            getStandardJavaObjectInspectorFromTypeInfo(objectTypeInfos.get(i)));
      }
      result = ObjectInspectorFactory.getStandardUnionObjectInspector(
          fieldObjectInspectors);
      break;
    }
    default:
      // BUG FIX: the old code fell through with result == null, and
      // ConcurrentHashMap.putIfAbsent below rejects null values, producing an
      // uninformative NullPointerException. Fail fast with a clear message.
      throw new RuntimeException(
          "Unknown TypeInfo category: " + typeInfo.getCategory());
    }
    // Keep the first inspector cached for this type so every caller sees the
    // same instance.
    ObjectInspector prev =
        cachedStandardJavaObjectInspector.putIfAbsent(typeInfo, result);
    if (prev != null) {
      result = prev;
    }
  }
  return result;
}
/**
 * Get the TypeInfo object from the ObjectInspector object by recursively
 * going into the ObjectInspector structure.
 *
 * <p>Possible future optimization: have inspectors carry their TypeInfo
 * directly so no walk is needed.
 */
public static TypeInfo getTypeInfoFromObjectInspector(ObjectInspector oi) {
  if (oi == null) {
    return null;
  }
  switch (oi.getCategory()) {
  case PRIMITIVE:
    return ((PrimitiveObjectInspector) oi).getTypeInfo();
  case LIST: {
    ListObjectInspector listInspector = (ListObjectInspector) oi;
    TypeInfo elementType =
        getTypeInfoFromObjectInspector(listInspector.getListElementObjectInspector());
    return TypeInfoFactory.getListTypeInfo(elementType);
  }
  case MAP: {
    MapObjectInspector mapInspector = (MapObjectInspector) oi;
    return TypeInfoFactory.getMapTypeInfo(
        getTypeInfoFromObjectInspector(mapInspector.getMapKeyObjectInspector()),
        getTypeInfoFromObjectInspector(mapInspector.getMapValueObjectInspector()));
  }
  case STRUCT: {
    StructObjectInspector structInspector = (StructObjectInspector) oi;
    List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
    List<String> fieldNames = new ArrayList<String>(fields.size());
    List<TypeInfo> fieldTypes = new ArrayList<TypeInfo>(fields.size());
    for (StructField field : fields) {
      fieldNames.add(field.getFieldName());
      fieldTypes.add(getTypeInfoFromObjectInspector(field.getFieldObjectInspector()));
    }
    return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypes);
  }
  case UNION: {
    UnionObjectInspector unionInspector = (UnionObjectInspector) oi;
    List<TypeInfo> memberTypes = new ArrayList<TypeInfo>();
    for (ObjectInspector member : unionInspector.getObjectInspectors()) {
      memberTypes.add(getTypeInfoFromObjectInspector(member));
    }
    return TypeInfoFactory.getUnionTypeInfo(memberTypes);
  }
  default:
    throw new RuntimeException("Unknown ObjectInspector category!");
  }
}
/**
 * Collects the TypeInfo of every field of the given struct inspector, in
 * declaration order, by re-parsing each field's type name.
 */
public static ArrayList<TypeInfo> typeInfosFromStructObjectInspector(
    StructObjectInspector structObjectInspector) {
  List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
  ArrayList<TypeInfo> fieldTypes = new ArrayList<TypeInfo>(fields.size());
  for (StructField structField : fields) {
    String typeName = structField.getFieldObjectInspector().getTypeName();
    fieldTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(typeName));
  }
  return fieldTypes;
}
/**
 * Parses each type-name string in the given list into its TypeInfo,
 * preserving order.
 */
public static ArrayList<TypeInfo> typeInfosFromTypeNames(List<String> typeNames) {
  ArrayList<TypeInfo> parsed = new ArrayList<TypeInfo>(typeNames.size());
  for (String typeName : typeNames) {
    parsed.add(TypeInfoUtils.getTypeInfoFromTypeString(typeName));
  }
  return parsed;
}
/**
 * Parses a (possibly comma-separated) type string into the list of TypeInfos
 * it describes.
 */
public static ArrayList<TypeInfo> getTypeInfosFromTypeString(String typeString) {
  return new TypeInfoParser(typeString).parseTypeInfos();
}
/**
 * Renders each TypeInfo in the list to its string form, preserving order.
 * Returns null when the input list is null.
 */
public static List<String> getTypeStringsFromTypeInfo(List<TypeInfo> typeInfos) {
  if (typeInfos == null) {
    return null;
  }
  List<String> names = new ArrayList<>(typeInfos.size());
  for (int i = 0; i < typeInfos.size(); i++) {
    names.add(typeInfos.get(i).toString());
  }
  return names;
}
/**
 * Parses a type string expected to describe a single type and returns the
 * first parsed TypeInfo.
 */
public static TypeInfo getTypeInfoFromTypeString(String typeString) {
  return new TypeInfoParser(typeString).parseTypeInfos().get(0);
}
/**
 * Given two types, determine whether conversion needs to occur to compare the
 * two types. This is needed for cases like varchar, where the TypeInfo for
 * varchar(10) != varchar(5), but there would be no need to convert to compare
 * these values.
 *
 * @param typeA first type
 * @param typeB second type
 * @return true when a conversion is required before comparing values
 */
public static boolean isConversionRequiredForComparison(TypeInfo typeA, TypeInfo typeB) {
  boolean directlyComparable =
      typeA.equals(typeB) || TypeInfoUtils.doPrimitiveCategoriesMatch(typeA, typeB);
  return !directlyComparable;
}
/**
 * Return the character length of the type: the maximum varchar length for
 * STRING, the declared length for CHAR/VARCHAR, and 0 for anything else.
 *
 * @param typeInfo primitive type to inspect
 * @return character length, or 0 for non-character types
 */
public static int getCharacterLengthForType(PrimitiveTypeInfo typeInfo) {
  switch (typeInfo.getPrimitiveCategory()) {
  case STRING:
    return HiveVarchar.MAX_VARCHAR_LENGTH;
  case CHAR:
  case VARCHAR:
    return ((BaseCharTypeInfo) typeInfo).getLength();
  default:
    return 0;
  }
}
// Registers a primitive category as numeric at the given promotion level. The
// category is appended to numericTypeList and mapped to its level in
// numericTypes; implicitConvertible() compares these levels to decide whether
// a widening numeric conversion is allowed.
// NOTE(review): mutates shared static collections without synchronization —
// presumably only invoked during static initialization; confirm before any
// runtime use.
public static void registerNumericType(PrimitiveCategory primitiveCategory, int level) {
  numericTypeList.add(primitiveCategory);
  numericTypes.put(primitiveCategory, level);
}
/**
 * Returns whether a value of primitive category {@code from} can be
 * implicitly converted to category {@code to}.
 */
public static boolean implicitConvertible(PrimitiveCategory from, PrimitiveCategory to) {
  if (from == to) {
    return true;
  }
  PrimitiveGrouping fromPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(from);
  PrimitiveGrouping toPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(to);
  // Allow implicit String to Double conversion
  if (fromPg == PrimitiveGrouping.STRING_GROUP && to == PrimitiveCategory.DOUBLE) {
    return true;
  }
  // Allow implicit String to Decimal conversion
  if (fromPg == PrimitiveGrouping.STRING_GROUP && to == PrimitiveCategory.DECIMAL) {
    return true;
  }
  // Void can be converted to any type
  if (from == PrimitiveCategory.VOID) {
    return true;
  }
  // Allow implicit Date-group to String-group conversion. (The old comment
  // said "String to Date", but the check below is date -> string.)
  if (fromPg == PrimitiveGrouping.DATE_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
    return true;
  }
  // Allow implicit Numeric to String conversion
  if (fromPg == PrimitiveGrouping.NUMERIC_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
    return true;
  }
  // Allow implicit String to varchar conversion, and vice versa
  if (fromPg == PrimitiveGrouping.STRING_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
    return true;
  }
  // Numeric widening: allow conversion only from a lower to an equal-or-higher
  // registered level (e.g. Byte -> Integer -> Long -> Float -> Double).
  Integer f = numericTypes.get(from);
  Integer t = numericTypes.get(to);
  if (f == null || t == null) {
    return false;
  }
  if (f.intValue() > t.intValue()) {
    return false;
  }
  return true;
}
/**
 * Returns whether it is possible to implicitly convert an object of type
 * {@code from} to type {@code to}.
 */
public static boolean implicitConvertible(TypeInfo from, TypeInfo to) {
  if (from.equals(to)) {
    return true;
  }
  // Compare primitive categories rather than full TypeInfos so that two
  // qualified types of the same family (e.g. varchar(5) vs varchar(10),
  // decimal(10,2) vs decimal(5,0)) are treated as equivalent.
  if (from.getCategory() != Category.PRIMITIVE || to.getCategory() != Category.PRIMITIVE) {
    return false;
  }
  PrimitiveCategory fromCategory = ((PrimitiveTypeInfo) from).getPrimitiveCategory();
  PrimitiveCategory toCategory = ((PrimitiveTypeInfo) to).getPrimitiveCategory();
  return implicitConvertible(fromCategory, toCategory);
}
}
// File: java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/InlineSecret.java (googleapis/google-cloud-java)
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/cloudbuild/v1/cloudbuild.proto
// Protobuf Java Version: 3.25.8
package com.google.cloudbuild.v1;
/**
*
*
* <pre>
* Pairs a set of secret environment variables mapped to encrypted
* values with the Cloud KMS key to use to decrypt the value.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v1.InlineSecret}
*/
public final class InlineSecret extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.InlineSecret)
InlineSecretOrBuilder {
// ---- protoc-generated construction/reflection plumbing (file header says
// "DO NOT EDIT"); comments below are review annotations only. ----
private static final long serialVersionUID = 0L;
// Use InlineSecret.newBuilder() to construct.
private InlineSecret(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private InlineSecret() {
  kmsKeyName_ = "";
}
// Returns a fresh empty InlineSecret.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new InlineSecret();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloudbuild.v1.Cloudbuild
      .internal_static_google_devtools_cloudbuild_v1_InlineSecret_descriptor;
}
// Maps a field number to its MapField accessor; only field 2 (env_map) is a map.
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
    int number) {
  switch (number) {
    case 2:
      return internalGetEnvMap();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloudbuild.v1.Cloudbuild
      .internal_static_google_devtools_cloudbuild_v1_InlineSecret_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloudbuild.v1.InlineSecret.class,
          com.google.cloudbuild.v1.InlineSecret.Builder.class);
}
// ---- generated kms_key_name (field 1) storage and accessors ----
public static final int KMS_KEY_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; the getters below lazily convert and
// cache the other representation in place.
@SuppressWarnings("serial")
private volatile java.lang.Object kmsKeyName_ = "";
/**
 *
 *
 * <pre>
 * Resource name of Cloud KMS crypto key to decrypt the encrypted value.
 * In format: projects/&#42;/locations/&#42;/keyRings/&#42;/cryptoKeys/&#42;
 * </pre>
 *
 * <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The kmsKeyName.
 */
@java.lang.Override
public java.lang.String getKmsKeyName() {
  java.lang.Object ref = kmsKeyName_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    kmsKeyName_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Resource name of Cloud KMS crypto key to decrypt the encrypted value.
 * In format: projects/&#42;/locations/&#42;/keyRings/&#42;/cryptoKeys/&#42;
 * </pre>
 *
 * <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
 *
 * @return The bytes for kmsKeyName.
 */
@java.lang.Override
public com.google.protobuf.ByteString getKmsKeyNameBytes() {
  java.lang.Object ref = kmsKeyName_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String and memoize the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    kmsKeyName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// ---- generated env_map (field 2) map-field storage and accessors ----
public static final int ENV_MAP_FIELD_NUMBER = 2;
// Lazy holder for the string->bytes map-entry prototype used to (de)serialize
// env_map entries.
private static final class EnvMapDefaultEntryHolder {
  static final com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.ByteString>
      defaultEntry =
          com.google.protobuf.MapEntry
              .<java.lang.String, com.google.protobuf.ByteString>newDefaultInstance(
                  com.google.cloudbuild.v1.Cloudbuild
                      .internal_static_google_devtools_cloudbuild_v1_InlineSecret_EnvMapEntry_descriptor,
                  com.google.protobuf.WireFormat.FieldType.STRING,
                  "",
                  com.google.protobuf.WireFormat.FieldType.BYTES,
                  com.google.protobuf.ByteString.EMPTY);
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString> envMap_;
// Returns the backing MapField, substituting an immutable empty map when unset.
private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString>
    internalGetEnvMap() {
  if (envMap_ == null) {
    return com.google.protobuf.MapField.emptyMapField(EnvMapDefaultEntryHolder.defaultEntry);
  }
  return envMap_;
}
public int getEnvMapCount() {
  return internalGetEnvMap().getMap().size();
}
/**
 *
 *
 * <pre>
 * Map of environment variable name to its encrypted value.
 *
 * Secret environment variables must be unique across all of a build's
 * secrets, and must be used by at least one build step. Values can be at most
 * 64 KB in size. There can be at most 100 secret values across all of a
 * build's secrets.
 * </pre>
 *
 * <code>map&lt;string, bytes&gt; env_map = 2;</code>
 */
@java.lang.Override
public boolean containsEnvMap(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  return internalGetEnvMap().getMap().containsKey(key);
}
/** Use {@link #getEnvMapMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getEnvMap() {
  return getEnvMapMap();
}
/**
 *
 *
 * <pre>
 * Map of environment variable name to its encrypted value.
 *
 * Secret environment variables must be unique across all of a build's
 * secrets, and must be used by at least one build step. Values can be at most
 * 64 KB in size. There can be at most 100 secret values across all of a
 * build's secrets.
 * </pre>
 *
 * <code>map&lt;string, bytes&gt; env_map = 2;</code>
 */
@java.lang.Override
public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getEnvMapMap() {
  return internalGetEnvMap().getMap();
}
/**
 *
 *
 * <pre>
 * Map of environment variable name to its encrypted value.
 *
 * Secret environment variables must be unique across all of a build's
 * secrets, and must be used by at least one build step. Values can be at most
 * 64 KB in size. There can be at most 100 secret values across all of a
 * build's secrets.
 * </pre>
 *
 * <code>map&lt;string, bytes&gt; env_map = 2;</code>
 */
@java.lang.Override
public /* nullable */ com.google.protobuf.ByteString getEnvMapOrDefault(
    java.lang.String key,
    /* nullable */
    com.google.protobuf.ByteString defaultValue) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
      internalGetEnvMap().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
 *
 *
 * <pre>
 * Map of environment variable name to its encrypted value.
 *
 * Secret environment variables must be unique across all of a build's
 * secrets, and must be used by at least one build step. Values can be at most
 * 64 KB in size. There can be at most 100 secret values across all of a
 * build's secrets.
 * </pre>
 *
 * <code>map&lt;string, bytes&gt; env_map = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.ByteString getEnvMapOrThrow(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
      internalGetEnvMap().getMap();
  if (!map.containsKey(key)) {
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}
// ---- generated serialization, equality and hashing ----
// -1 = not computed yet, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1 (kms_key_name) is only written when non-empty (proto3 semantics).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kmsKeyName_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kmsKeyName_);
  }
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetEnvMap(), EnvMapDefaultEntryHolder.defaultEntry, 2);
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized in memoizedSize; -1 means not computed yet.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kmsKeyName_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kmsKeyName_);
  }
  // Each map entry is sized as a nested message on field 2.
  for (java.util.Map.Entry<java.lang.String, com.google.protobuf.ByteString> entry :
      internalGetEnvMap().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.ByteString> envMap__ =
        EnvMapDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, envMap__);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloudbuild.v1.InlineSecret)) {
    return super.equals(obj);
  }
  com.google.cloudbuild.v1.InlineSecret other = (com.google.cloudbuild.v1.InlineSecret) obj;
  if (!getKmsKeyName().equals(other.getKmsKeyName())) return false;
  if (!internalGetEnvMap().equals(other.internalGetEnvMap())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + KMS_KEY_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getKmsKeyName().hashCode();
  if (!internalGetEnvMap().getMap().isEmpty()) {
    hash = (37 * hash) + ENV_MAP_FIELD_NUMBER;
    hash = (53 * hash) + internalGetEnvMap().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---- generated parseFrom overloads: one per input source
// (ByteBuffer/ByteString/byte[]/InputStream/CodedInputStream), each with and
// without an extension registry; all delegate to PARSER or the runtime helpers. ----
public static com.google.cloudbuild.v1.InlineSecret parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v1.InlineSecret parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v1.InlineSecret parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloudbuild.v1.InlineSecret parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// ---- generated builder factory methods ----
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated from the given prototype message.
public static Builder newBuilder(com.google.cloudbuild.v1.InlineSecret prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty builder; anything else is copied.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Pairs a set of secret environment variables mapped to encrypted
* values with the Cloud KMS key to use to decrypt the value.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v1.InlineSecret}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.InlineSecret)
com.google.cloudbuild.v1.InlineSecretOrBuilder {
// ---- generated Builder descriptor/reflection plumbing ----
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloudbuild.v1.Cloudbuild
      .internal_static_google_devtools_cloudbuild_v1_InlineSecret_descriptor;
}
// Read-only MapField accessor for map fields; only field 2 (env_map) is a map.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
    int number) {
  switch (number) {
    case 2:
      return internalGetEnvMap();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}
// Mutable MapField accessor counterpart for builder-side modification.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
    int number) {
  switch (number) {
    case 2:
      return internalGetMutableEnvMap();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloudbuild.v1.Cloudbuild
      .internal_static_google_devtools_cloudbuild_v1_InlineSecret_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloudbuild.v1.InlineSecret.class,
          com.google.cloudbuild.v1.InlineSecret.Builder.class);
}
// ---- generated Builder construction and reset ----
// Construct using com.google.cloudbuild.v1.InlineSecret.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets every field (and the presence bits) to its default state.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  kmsKeyName_ = "";
  internalGetMutableEnvMap().clear();
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloudbuild.v1.Cloudbuild
      .internal_static_google_devtools_cloudbuild_v1_InlineSecret_descriptor;
}
@java.lang.Override
public com.google.cloudbuild.v1.InlineSecret getDefaultInstanceForType() {
  return com.google.cloudbuild.v1.InlineSecret.getDefaultInstance();
}
// ---- generated build methods ----
@java.lang.Override
public com.google.cloudbuild.v1.InlineSecret build() {
  com.google.cloudbuild.v1.InlineSecret result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.cloudbuild.v1.InlineSecret buildPartial() {
  com.google.cloudbuild.v1.InlineSecret result =
      new com.google.cloudbuild.v1.InlineSecret(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Copies only the fields whose presence bit is set into the result message.
private void buildPartial0(com.google.cloudbuild.v1.InlineSecret result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.kmsKeyName_ = kmsKeyName_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.envMap_ = internalGetEnvMap();
    result.envMap_.makeImmutable();
  }
}
// ---- generated overrides that simply delegate to the superclass (kept so the
// generated builder type is returned and for protoc insertion points) ----
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// ---- generated merge methods ----
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for same-type merges; otherwise fall back to reflective merge.
  if (other instanceof com.google.cloudbuild.v1.InlineSecret) {
    return mergeFrom((com.google.cloudbuild.v1.InlineSecret) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(com.google.cloudbuild.v1.InlineSecret other) {
  if (other == com.google.cloudbuild.v1.InlineSecret.getDefaultInstance()) return this;
  // kms_key_name is only taken from `other` when non-empty.
  if (!other.getKmsKeyName().isEmpty()) {
    kmsKeyName_ = other.kmsKeyName_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  internalGetMutableEnvMap().mergeFrom(other.internalGetEnvMap());
  bitField0_ |= 0x00000002;
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses fields from the wire into this builder until end-of-stream or an
// end-group tag; unknown fields are preserved.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: // field 1 (kms_key_name), length-delimited
          {
            kmsKeyName_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18: // field 2 (env_map), one map entry per message
          {
            com.google.protobuf.MapEntry<java.lang.String, com.google.protobuf.ByteString>
                envMap__ =
                    input.readMessage(
                        EnvMapDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
            internalGetMutableEnvMap()
                .getMutableMap()
                .put(envMap__.getKey(), envMap__.getValue());
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Bit 0x1 = kms_key_name set, bit 0x2 = env_map set (builder-side tracking).
private int bitField0_;
// Holds either a String or a ByteString; decoded lazily and cached
// (see getKmsKeyName()/getKmsKeyNameBytes()).
private java.lang.Object kmsKeyName_ = "";
/**
*
*
* <pre>
* Resource name of Cloud KMS crypto key to decrypt the encrypted value.
* In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
* </pre>
*
* <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The kmsKeyName.
*/
public java.lang.String getKmsKeyName() {
java.lang.Object ref = kmsKeyName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 decode.
kmsKeyName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Resource name of Cloud KMS crypto key to decrypt the encrypted value.
* In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
* </pre>
*
* <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for kmsKeyName.
*/
public com.google.protobuf.ByteString getKmsKeyNameBytes() {
java.lang.Object ref = kmsKeyName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the re-encode.
kmsKeyName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Resource name of Cloud KMS crypto key to decrypt the encrypted value.
* In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
* </pre>
*
* <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The kmsKeyName to set.
* @return This builder for chaining.
*/
public Builder setKmsKeyName(java.lang.String value) {
// Null-hostile, like all generated protobuf setters.
if (value == null) {
throw new NullPointerException();
}
kmsKeyName_ = value;
// Mark the field as explicitly set and notify parent builders.
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Resource name of Cloud KMS crypto key to decrypt the encrypted value.
* In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
* </pre>
*
* <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return This builder for chaining.
*/
public Builder clearKmsKeyName() {
// Reset to the default (empty) value and clear the set bit.
kmsKeyName_ = getDefaultInstance().getKmsKeyName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Resource name of Cloud KMS crypto key to decrypt the encrypted value.
* In format: projects/*/locations/*/keyRings/*/cryptoKeys/*
* </pre>
*
* <code>string kms_key_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @param value The bytes for kmsKeyName to set.
* @return This builder for chaining.
*/
public Builder setKmsKeyNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Validates that the bytes are well-formed UTF-8 before storing.
checkByteStringIsUtf8(value);
kmsKeyName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Lazily created map storage; null means "empty, never mutated".
private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString> envMap_;
// Read-only accessor: never allocates for the empty case.
private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString>
internalGetEnvMap() {
if (envMap_ == null) {
// Shared immutable empty map; safe because read paths never mutate it.
return com.google.protobuf.MapField.emptyMapField(EnvMapDefaultEntryHolder.defaultEntry);
}
return envMap_;
}
// Mutation accessor: allocates on first use and copies-on-write when the
// current storage is shared (immutable) with a previously built message.
private com.google.protobuf.MapField<java.lang.String, com.google.protobuf.ByteString>
internalGetMutableEnvMap() {
if (envMap_ == null) {
envMap_ = com.google.protobuf.MapField.newMapField(EnvMapDefaultEntryHolder.defaultEntry);
}
if (!envMap_.isMutable()) {
envMap_ = envMap_.copy();
}
bitField0_ |= 0x00000002;
onChanged();
return envMap_;
}
public int getEnvMapCount() {
return internalGetEnvMap().getMap().size();
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
@java.lang.Override
public boolean containsEnvMap(java.lang.String key) {
// Map keys are null-hostile, matching protobuf map semantics.
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetEnvMap().getMap().containsKey(key);
}
/** Use {@link #getEnvMapMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getEnvMap() {
// Deprecated alias kept for source compatibility; prefer getEnvMapMap().
return getEnvMapMap();
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getEnvMapMap() {
// View of the current entries; mutate via putEnvMap()/removeEnvMap() instead.
return internalGetEnvMap().getMap();
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
@java.lang.Override
public /* nullable */ com.google.protobuf.ByteString getEnvMapOrDefault(
java.lang.String key,
/* nullable */
com.google.protobuf.ByteString defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
internalGetEnvMap().getMap();
// containsKey check distinguishes "absent key" from a stored value.
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.ByteString getEnvMapOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, com.google.protobuf.ByteString> map =
internalGetEnvMap().getMap();
// Unlike getEnvMapOrDefault, an absent key is an error here.
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
public Builder clearEnvMap() {
// Clears both the set bit and the backing map contents.
bitField0_ = (bitField0_ & ~0x00000002);
internalGetMutableEnvMap().getMutableMap().clear();
return this;
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
public Builder removeEnvMap(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
// Removing an absent key is a no-op.
internalGetMutableEnvMap().getMutableMap().remove(key);
return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, com.google.protobuf.ByteString> getMutableEnvMap() {
// Deprecated escape hatch that exposes mutable storage directly.
bitField0_ |= 0x00000002;
return internalGetMutableEnvMap().getMutableMap();
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
public Builder putEnvMap(java.lang.String key, com.google.protobuf.ByteString value) {
// Both key and value are null-hostile per protobuf map semantics.
if (key == null) {
throw new NullPointerException("map key");
}
if (value == null) {
throw new NullPointerException("map value");
}
internalGetMutableEnvMap().getMutableMap().put(key, value);
bitField0_ |= 0x00000002;
return this;
}
/**
*
*
* <pre>
* Map of environment variable name to its encrypted value.
*
* Secret environment variables must be unique across all of a build's
* secrets, and must be used by at least one build step. Values can be at most
* 64 KB in size. There can be at most 100 secret values across all of a
* build's secrets.
* </pre>
*
* <code>map<string, bytes> env_map = 2;</code>
*/
public Builder putAllEnvMap(
java.util.Map<java.lang.String, com.google.protobuf.ByteString> values) {
// Bulk insert; keys already present are overwritten by `values`.
internalGetMutableEnvMap().getMutableMap().putAll(values);
bitField0_ |= 0x00000002;
return this;
}
// Unknown-field handling delegates to the reflective superclass implementation.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.InlineSecret)
}
// @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.InlineSecret)
// Singleton default instance; also serves as the prototype for newBuilder().
private static final com.google.cloudbuild.v1.InlineSecret DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloudbuild.v1.InlineSecret();
}
public static com.google.cloudbuild.v1.InlineSecret getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<InlineSecret> PARSER =
new com.google.protobuf.AbstractParser<InlineSecret>() {
@java.lang.Override
public InlineSecret parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
// Parse via a fresh builder; on failure, attach the partially parsed
// message so callers can inspect what was successfully read.
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<InlineSecret> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<InlineSecret> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloudbuild.v1.InlineSecret getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/documentai/v1/document_processor_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.documentai.v1;
/**
*
*
* <pre>
* Request message for the
* [CreateProcessor][google.cloud.documentai.v1.DocumentProcessorService.CreateProcessor]
* method. Notice this request is sent to a regionalized backend service. If the
* [ProcessorType][google.cloud.documentai.v1.ProcessorType] isn't available in
* that region, the creation fails.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1.CreateProcessorRequest}
*/
public final class CreateProcessorRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.CreateProcessorRequest)
CreateProcessorRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateProcessorRequest.newBuilder() to construct.
private CreateProcessorRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateProcessorRequest() {
// Field defaults for the no-arg (default-instance) constructor.
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
// Used by the protobuf runtime to allocate instances reflectively.
return new CreateProcessorRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_CreateProcessorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Wires the descriptor to this class for reflective field access.
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_CreateProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.documentai.v1.CreateProcessorRequest.class,
com.google.cloud.documentai.v1.CreateProcessorRequest.Builder.class);
}
// Bit 0x1 tracks presence of the `processor` message field.
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 decode.
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the re-encode.
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROCESSOR_FIELD_NUMBER = 2;
// Nullable; presence is tracked via bitField0_ (see hasProcessor()).
private com.google.cloud.documentai.v1.Processor processor_;
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the processor field is set.
*/
@java.lang.Override
public boolean hasProcessor() {
// Message-field presence comes from the bit field, not a null check.
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The processor.
*/
@java.lang.Override
public com.google.cloud.documentai.v1.Processor getProcessor() {
// Never returns null; falls back to the default instance when unset.
return processor_ == null
? com.google.cloud.documentai.v1.Processor.getDefaultInstance()
: processor_;
}
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.documentai.v1.ProcessorOrBuilder getProcessorOrBuilder() {
// Same null-safe fallback as getProcessor(), typed as the OrBuilder view.
return processor_ == null
? com.google.cloud.documentai.v1.Processor.getDefaultInstance()
: processor_;
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// proto3 message with no required fields: always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// proto3 scalar: the default (empty) parent is not written on the wire.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
// Message field is written only when explicitly present.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getProcessor());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Memoized; -1 marks "not yet computed". Must mirror writeTo() exactly.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getProcessor());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.documentai.v1.CreateProcessorRequest)) {
return super.equals(obj);
}
com.google.cloud.documentai.v1.CreateProcessorRequest other =
(com.google.cloud.documentai.v1.CreateProcessorRequest) obj;
if (!getParent().equals(other.getParent())) return false;
// Message field: compare presence first, then value only when both set.
if (hasProcessor() != other.hasProcessor()) return false;
if (hasProcessor()) {
if (!getProcessor().equals(other.getProcessor())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 means "not yet computed".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
// Optional message field contributes only when present, matching equals().
if (hasProcessor()) {
hash = (37 * hash) + PROCESSOR_FIELD_NUMBER;
hash = (53 * hash) + getProcessor().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points for every supported input source; all delegate to
// PARSER, optionally with an extension registry. Stream variants route through
// GeneratedMessageV3 helpers so IOExceptions surface unchanged.
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.documentai.v1.CreateProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Convenience factory: a builder pre-populated from an existing message.
public static Builder newBuilder(
com.google.cloud.documentai.v1.CreateProcessorRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoid a needless merge when starting from the default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the
* [CreateProcessor][google.cloud.documentai.v1.DocumentProcessorService.CreateProcessor]
* method. Notice this request is sent to a regionalized backend service. If the
* [ProcessorType][google.cloud.documentai.v1.ProcessorType] isn't available in
* that region, the creation fails.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1.CreateProcessorRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.CreateProcessorRequest)
com.google.cloud.documentai.v1.CreateProcessorRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_CreateProcessorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
// Same descriptor wiring as the message class, for the builder side.
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_CreateProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.documentai.v1.CreateProcessorRequest.class,
com.google.cloud.documentai.v1.CreateProcessorRequest.Builder.class);
}
// Construct using com.google.cloud.documentai.v1.CreateProcessorRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders only when the runtime requires it.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getProcessorFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
// Reset all fields to defaults and release any nested processor builder.
super.clear();
bitField0_ = 0;
parent_ = "";
processor_ = null;
if (processorBuilder_ != null) {
processorBuilder_.dispose();
processorBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.documentai.v1.DocumentAiProcessorService
.internal_static_google_cloud_documentai_v1_CreateProcessorRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.documentai.v1.CreateProcessorRequest getDefaultInstanceForType() {
return com.google.cloud.documentai.v1.CreateProcessorRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.documentai.v1.CreateProcessorRequest build() {
com.google.cloud.documentai.v1.CreateProcessorRequest result = buildPartial();
// proto3: isInitialized() is always true here, so this never throws.
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.documentai.v1.CreateProcessorRequest buildPartial() {
com.google.cloud.documentai.v1.CreateProcessorRequest result =
new com.google.cloud.documentai.v1.CreateProcessorRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.documentai.v1.CreateProcessorRequest result) {
// Copies only the fields whose builder-side bits are set. Note the bit
// remapping: the builder uses 0x2 for `processor` while the built message
// uses 0x1.
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.processor_ = processorBuilder_ == null ? processor_ : processorBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
// Standard GeneratedMessageV3.Builder overrides; each simply delegates to the
// reflective implementation in the superclass, narrowing the return type.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Typed merge when possible; otherwise reflective merge via the superclass.
if (other instanceof com.google.cloud.documentai.v1.CreateProcessorRequest) {
return mergeFrom((com.google.cloud.documentai.v1.CreateProcessorRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.documentai.v1.CreateProcessorRequest other) {
// Merging the default instance is a no-op.
if (other == com.google.cloud.documentai.v1.CreateProcessorRequest.getDefaultInstance())
return this;
// proto3 scalar: only overwrite when the source value is non-empty.
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
// Message field: recursive merge, and only when present on the source.
if (other.hasProcessor()) {
mergeProcessor(other.getProcessor());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// proto3 message with no required fields: always initialized.
return true;
}
// Parses wire-format bytes directly into this builder, field by field.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
case 10:
{
// Field 1 (parent), wire type 2: UTF-8 validated string.
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
// Field 2 (processor), wire type 2: parsed directly into the
// nested field builder, merging with any existing value.
input.readMessage(getProcessorFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
// Unrecognized fields are preserved in the unknown-field set.
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even when parsing fails partway.
onChanged();
} // finally
return this;
}
// Bit 0x1 = parent set, bit 0x2 = processor set (builder-side numbering).
private int bitField0_;
// Holds either a String or a ByteString; decoded lazily and cached.
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 decode.
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString so subsequent calls skip the re-encode.
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
// Null-hostile, like all generated protobuf setters.
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
// Mark the field as explicitly set and notify parent builders.
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
// Reset to the default value and clear the set bit.
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent (project and location) under which to create the
* processor. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 string fields must hold valid UTF-8; reject malformed bytes early.
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Field `processor` (field number 2). At most one of processor_ /
    // processorBuilder_ is authoritative at a time: processor_ until a nested
    // builder is requested, processorBuilder_ afterwards (see
    // getProcessorFieldBuilder()).
    private com.google.cloud.documentai.v1.Processor processor_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.documentai.v1.Processor,
            com.google.cloud.documentai.v1.Processor.Builder,
            com.google.cloud.documentai.v1.ProcessorOrBuilder>
        processorBuilder_;
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the processor field is set.
*/
public boolean hasProcessor() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The processor.
*/
public com.google.cloud.documentai.v1.Processor getProcessor() {
if (processorBuilder_ == null) {
return processor_ == null
? com.google.cloud.documentai.v1.Processor.getDefaultInstance()
: processor_;
} else {
return processorBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setProcessor(com.google.cloud.documentai.v1.Processor value) {
      if (processorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        processor_ = value;
      } else {
        // The nested builder performs its own null check.
        processorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setProcessor(com.google.cloud.documentai.v1.Processor.Builder builderForValue) {
      // Snapshot the builder's current state; later mutations of
      // builderForValue do not affect this request.
      if (processorBuilder_ == null) {
        processor_ = builderForValue.build();
      } else {
        processorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeProcessor(com.google.cloud.documentai.v1.Processor value) {
      if (processorBuilder_ == null) {
        // Only field-merge when a non-default value was previously set;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000002) != 0)
            && processor_ != null
            && processor_ != com.google.cloud.documentai.v1.Processor.getDefaultInstance()) {
          getProcessorBuilder().mergeFrom(value);
        } else {
          processor_ = value;
        }
      } else {
        processorBuilder_.mergeFrom(value);
      }
      // Mark the field set only when a value is actually present.
      if (processor_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearProcessor() {
      bitField0_ = (bitField0_ & ~0x00000002);
      processor_ = null;
      // Release the nested builder, if any, so a later get recreates it fresh.
      if (processorBuilder_ != null) {
        processorBuilder_.dispose();
        processorBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.documentai.v1.Processor.Builder getProcessorBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getProcessorFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.documentai.v1.ProcessorOrBuilder getProcessorOrBuilder() {
if (processorBuilder_ != null) {
return processorBuilder_.getMessageOrBuilder();
} else {
return processor_ == null
? com.google.cloud.documentai.v1.Processor.getDefaultInstance()
: processor_;
}
}
/**
*
*
* <pre>
* Required. The processor to be created, requires
* [Processor.type][google.cloud.documentai.v1.Processor.type] and
* [Processor.display_name][google.cloud.documentai.v1.Processor.display_name]
* to be set. Also, the
* [Processor.kms_key_name][google.cloud.documentai.v1.Processor.kms_key_name]
* field must be set if the processor is under CMEK.
* </pre>
*
* <code>
* .google.cloud.documentai.v1.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.documentai.v1.Processor,
            com.google.cloud.documentai.v1.Processor.Builder,
            com.google.cloud.documentai.v1.ProcessorOrBuilder>
        getProcessorFieldBuilder() {
      // Lazily create the nested field builder, seeded from the current
      // message; from then on the builder owns the value, so processor_ is
      // nulled out to avoid two sources of truth.
      if (processorBuilder_ == null) {
        processorBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.documentai.v1.Processor,
                com.google.cloud.documentai.v1.Processor.Builder,
                com.google.cloud.documentai.v1.ProcessorOrBuilder>(
                getProcessor(), getParentForChildren(), isClean());
        processor_ = null;
      }
      return processorBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation; unknown fields are preserved for round-tripping.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation; merges unrecognized wire data into this builder.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.CreateProcessorRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.documentai.v1.CreateProcessorRequest)
  // Shared singleton default instance; all unset message fields reference it.
  private static final com.google.cloud.documentai.v1.CreateProcessorRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.CreateProcessorRequest();
  }
  public static com.google.cloud.documentai.v1.CreateProcessorRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Any parse failure is reported as
  // InvalidProtocolBufferException carrying the partially-built message.
  private static final com.google.protobuf.Parser<CreateProcessorRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateProcessorRequest>() {
        @java.lang.Override
        public CreateProcessorRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far for diagnostics.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared wire-format parser.
  public static com.google.protobuf.Parser<CreateProcessorRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateProcessorRequest> getParserForType() {
    // Instance-level accessor required by the Message interface.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.documentai.v1.CreateProcessorRequest getDefaultInstanceForType() {
    // Instance-level accessor required by the Message interface.
    return DEFAULT_INSTANCE;
  }
}
|
apache/phoenix-connectors | 35,330 | phoenix5-hive4/src/main/java/org/apache/phoenix/hive/util/TypeInfoUtils.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.hive.util;
import java.lang.reflect.Field;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
/**
* TypeInfoUtils.
*
*/
public final class TypeInfoUtils {
  // Numeric categories in registration order (see the static block below).
  public static List<PrimitiveCategory> numericTypeList = new ArrayList<PrimitiveCategory>();
  // The ordering of types here is used to determine which numeric types
  // are common/convertible to one another. Probably better to rely on the
  // ordering explicitly defined here than to assume that the enum values
  // that were arbitrarily assigned in PrimitiveCategory work for our purposes.
  // Maps each numeric category to its precedence rank (1 = narrowest).
  public static EnumMap<PrimitiveCategory, Integer> numericTypes =
      new EnumMap<PrimitiveCategory, Integer>(PrimitiveCategory.class);
  static {
    // Register numeric categories in increasing "width" order; the second
    // argument is the precedence rank used for type-conversion decisions.
    // NOTE(review): registerNumericType is defined elsewhere in this class;
    // presumably it populates numericTypeList and numericTypes — confirm.
    registerNumericType(PrimitiveCategory.BYTE, 1);
    registerNumericType(PrimitiveCategory.SHORT, 2);
    registerNumericType(PrimitiveCategory.INT, 3);
    registerNumericType(PrimitiveCategory.LONG, 4);
    registerNumericType(PrimitiveCategory.FLOAT, 5);
    registerNumericType(PrimitiveCategory.DOUBLE, 6);
    registerNumericType(PrimitiveCategory.DECIMAL, 7);
    registerNumericType(PrimitiveCategory.STRING, 8);
  }
  // Static utility class: private constructor blocks instantiation.
  private TypeInfoUtils() {
    // prevent instantiation
  }
/**
* Return the extended TypeInfo from a Java type. By extended TypeInfo, we
* allow unknownType for java.lang.Object.
*
* @param t
* The Java type.
* @param m
* The method, only used for generating error messages.
*/
  private static TypeInfo getExtendedTypeInfoFromJavaType(Type t, Method m) {
    // Object maps to the special "unknown" TypeInfo.
    if (t == Object.class) {
      return TypeInfoFactory.unknownTypeInfo;
    }
    if (t instanceof ParameterizedType) {
      ParameterizedType pt = (ParameterizedType) t;
      // List?
      if (List.class == (Class<?>) pt.getRawType()
          || ArrayList.class == (Class<?>) pt.getRawType()) {
        // Recurse on the element type argument.
        return TypeInfoFactory.getListTypeInfo(getExtendedTypeInfoFromJavaType(
            pt.getActualTypeArguments()[0], m));
      }
      // Map?
      if (Map.class == (Class<?>) pt.getRawType()
          || HashMap.class == (Class<?>) pt.getRawType()) {
        // Recurse on both the key and value type arguments.
        return TypeInfoFactory.getMapTypeInfo(getExtendedTypeInfoFromJavaType(
            pt.getActualTypeArguments()[0], m),
            getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[1], m));
      }
      // Otherwise convert t to RawType so we will fall into the following if
      // block.
      t = pt.getRawType();
    }
    // Must be a class.
    if (!(t instanceof Class)) {
      throw new RuntimeException("Hive does not understand type " + t
          + " from " + m);
    }
    Class<?> c = (Class<?>) t;
    // Java Primitive Type?
    if (PrimitiveObjectInspectorUtils.isPrimitiveJavaType(c)) {
      return TypeInfoUtils
          .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory
          .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils
          .getTypeEntryFromPrimitiveJavaType(c).primitiveCategory));
    }
    // Java Primitive Class?
    if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(c)) {
      return TypeInfoUtils
          .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory
          .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils
          .getTypeEntryFromPrimitiveJavaClass(c).primitiveCategory));
    }
    // Primitive Writable class?
    if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(c)) {
      return TypeInfoUtils
          .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory
          .getPrimitiveWritableObjectInspector(PrimitiveObjectInspectorUtils
          .getTypeEntryFromPrimitiveWritableClass(c).primitiveCategory));
    }
    // Must be a struct: map each declared non-static field to a struct member.
    Field[] fields = ObjectInspectorUtils.getDeclaredNonStaticFields(c);
    ArrayList<String> fieldNames = new ArrayList<String>(fields.length);
    ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.length);
    for (Field field : fields) {
      fieldNames.add(field.getName());
      fieldTypeInfos.add(getExtendedTypeInfoFromJavaType(
          field.getGenericType(), m));
    }
    return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
  }
/**
* Returns the array element type, if the Type is an array (Object[]), or
* GenericArrayType ({@code Map<String,String>[]}). Otherwise return null.
*/
public static Type getArrayElementType(Type t) {
if (t instanceof Class && ((Class<?>) t).isArray()) {
Class<?> arrayClass = (Class<?>) t;
return arrayClass.getComponentType();
} else if (t instanceof GenericArrayType) {
GenericArrayType arrayType = (GenericArrayType) t;
return arrayType.getGenericComponentType();
}
return null;
}
/**
* Get the parameter TypeInfo for a method.
*
* @param size
* In case the last parameter of Method is an array, we will try to
* return a {@code List<TypeInfo>} with the specified size by repeating the
* element of the array at the end. In case the size is smaller than
* the minimum possible number of arguments for the method, null will
* be returned.
*/
public static List<TypeInfo> getParameterTypeInfos(Method m, int size) {
Type[] methodParameterTypes = m.getGenericParameterTypes();
// Whether the method takes variable-length arguments
// Whether the method takes an array like Object[],
// or String[] etc in the last argument.
Type lastParaElementType = TypeInfoUtils
.getArrayElementType(methodParameterTypes.length == 0 ? null
: methodParameterTypes[methodParameterTypes.length - 1]);
boolean isVariableLengthArgument = (lastParaElementType != null);
List<TypeInfo> typeInfos = null;
if (!isVariableLengthArgument) {
// Normal case, no variable-length arguments
if (size != methodParameterTypes.length) {
return null;
}
typeInfos = new ArrayList<TypeInfo>(methodParameterTypes.length);
for (Type methodParameterType : methodParameterTypes) {
typeInfos.add(getExtendedTypeInfoFromJavaType(methodParameterType, m));
}
} else {
// Variable-length arguments
if (size < methodParameterTypes.length - 1) {
return null;
}
typeInfos = new ArrayList<TypeInfo>(size);
for (int i = 0; i < methodParameterTypes.length - 1; i++) {
typeInfos.add(getExtendedTypeInfoFromJavaType(methodParameterTypes[i],
m));
}
for (int i = methodParameterTypes.length - 1; i < size; i++) {
typeInfos.add(getExtendedTypeInfoFromJavaType(lastParaElementType, m));
}
}
return typeInfos;
}
public static boolean hasParameters(String typeName) {
int idx = typeName.indexOf('(');
if (idx == -1) {
return false;
} else {
return true;
}
}
public static String getBaseName(String typeName) {
int idx = typeName.indexOf('(');
if (idx == -1) {
return typeName;
} else {
return typeName.substring(0, idx);
}
}
/**
* returns true if both TypeInfos are of primitive type, and the primitive category matches.
* @param ti1
* @param ti2
* @return
*/
public static boolean doPrimitiveCategoriesMatch(TypeInfo ti1, TypeInfo ti2) {
if (ti1.getCategory() == Category.PRIMITIVE && ti2.getCategory() == Category.PRIMITIVE) {
if (((PrimitiveTypeInfo)ti1).getPrimitiveCategory()
== ((PrimitiveTypeInfo)ti2).getPrimitiveCategory()) {
return true;
}
}
return false;
}
/**
* Parse a recursive TypeInfo list String. For example, the following inputs
* are valid inputs:
* "int,string,map<string,int>,list<map<int,list<string>>>,list<struct<a:int,b:string>>"
* The separators between TypeInfos can be ",", ":", or ";".
*
* In order to use this class: TypeInfoParser parser = new
* TypeInfoParser("int,string"); ArrayList<TypeInfo> typeInfos =
* parser.parseTypeInfos();
*/
  private static class TypeInfoParser {
    // One lexical token: its start offset in the input, its text, and whether
    // it is a run of identifier characters (see isTypeChar) vs. punctuation.
    private static class Token {
      public int position;
      public String text;
      public boolean isType;
      @Override
      public String toString() {
        return "" + position + ":" + text;
      }
    };
    // Characters that may appear inside a single type/name token.
    private static boolean isTypeChar(char c) {
      return Character.isLetterOrDigit(c) || c == '_' || c == '.' || c == ' ' || c == '$';
    }
    /**
     * Tokenize the typeInfoString. The rule is simple: all consecutive
     * alphadigits and '_', '.' are in one token, and all other characters are
     * one character per token.
     *
     * tokenize("map<int,string>") should return
     * ["map","<","int",",","string",">"]
     *
     * Note that we add '$' in new Calcite return path. As '$' will not appear
     * in any type in Hive, it is safe to do so.
     */
    private static ArrayList<Token> tokenize(String typeInfoString) {
      ArrayList<Token> tokens = new ArrayList<Token>(0);
      int begin = 0;
      int end = 1;
      while (end <= typeInfoString.length()) {
        // last character ends a token?
        if (end == typeInfoString.length()
            || !isTypeChar(typeInfoString.charAt(end - 1))
            || !isTypeChar(typeInfoString.charAt(end))) {
          Token t = new Token();
          t.position = begin;
          t.text = typeInfoString.substring(begin, end);
          t.isType = isTypeChar(typeInfoString.charAt(begin));
          tokens.add(t);
          begin = end;
        }
        end++;
      }
      return tokens;
    }
    // Tokenizes eagerly; parsing happens on demand via parseTypeInfos() or
    // parsePrimitiveParts().
    public TypeInfoParser(String typeInfoString) {
      this.typeInfoString = typeInfoString;
      typeInfoTokens = tokenize(typeInfoString);
    }
    private final String typeInfoString;
    private final ArrayList<Token> typeInfoTokens;
    private ArrayList<TypeInfo> typeInfos;
    // Cursor into typeInfoTokens; advanced by expect().
    private int iToken;
    // Parses the whole input as a separator-delimited list of types.
    // Separators may be ',', ';', or ':'.
    public ArrayList<TypeInfo> parseTypeInfos() {
      typeInfos = new ArrayList<TypeInfo>();
      iToken = 0;
      while (iToken < typeInfoTokens.size()) {
        typeInfos.add(parseType());
        if (iToken < typeInfoTokens.size()) {
          Token separator = typeInfoTokens.get(iToken);
          if (",".equals(separator.text) || ";".equals(separator.text)
              || ":".equals(separator.text)) {
            iToken++;
          } else {
            throw new IllegalArgumentException(
                "Error: ',', ':', or ';' expected at position "
                + separator.position + " from '" + typeInfoString + "' "
                + typeInfoTokens);
          }
        }
      }
      return typeInfos;
    }
    // Returns the current token without consuming it, or null at end of input.
    private Token peek() {
      if (iToken < typeInfoTokens.size()) {
        return typeInfoTokens.get(iToken);
      } else {
        return null;
      }
    }
    private Token expect(String item) {
      return expect(item, null);
    }
    // Consumes and returns the current token if it matches `item` (or the
    // optional `alternative` literal); throws IllegalArgumentException
    // otherwise. "type" and "name" are matched structurally, anything else
    // is matched as a literal.
    private Token expect(String item, String alternative) {
      if (iToken >= typeInfoTokens.size()) {
        throw new IllegalArgumentException("Error: " + item
            + " expected at the end of '" + typeInfoString + "'");
      }
      Token t = typeInfoTokens.get(iToken);
      if (item.equals("type")) {
        if (!serdeConstants.LIST_TYPE_NAME.equals(t.text)
            && !serdeConstants.MAP_TYPE_NAME.equals(t.text)
            && !serdeConstants.STRUCT_TYPE_NAME.equals(t.text)
            && !serdeConstants.UNION_TYPE_NAME.equals(t.text)
            && null == PrimitiveObjectInspectorUtils
                .getTypeEntryFromTypeName(t.text)
            && !t.text.equals(alternative)) {
          throw new IllegalArgumentException("Error: " + item
              + " expected at the position " + t.position + " of '"
              + typeInfoString + "' but '" + t.text + "' is found.");
        }
      } else if (item.equals("name")) {
        if (!t.isType && !t.text.equals(alternative)) {
          throw new IllegalArgumentException("Error: " + item
              + " expected at the position " + t.position + " of '"
              + typeInfoString + "' but '" + t.text + "' is found.");
        }
      } else {
        if (!item.equals(t.text) && !t.text.equals(alternative)) {
          throw new IllegalArgumentException("Error: " + item
              + " expected at the position " + t.position + " of '"
              + typeInfoString + "' but '" + t.text + "' is found.");
        }
      }
      iToken++;
      return t;
    }
    // Parses an optional "(p1,p2,...)" parameter list; returns an empty array
    // when no '(' follows.
    private String[] parseParams() {
      List<String> params = new LinkedList<String>();
      Token t = peek();
      if (t != null && t.text.equals("(")) {
        expect("(");
        // checking for null in the for-loop condition prevents null-ptr exception
        // and allows us to fail more gracefully with a parsing error.
        for(t = peek(); (t == null) || !t.text.equals(")"); t = expect(",",")")) {
          params.add(expect("name").text);
        }
        if (params.size() == 0) {
          throw new IllegalArgumentException(
              "type parameters expected for type string " + typeInfoString);
        }
      }
      return params.toArray(new String[params.size()]);
    }
    // Parses one (possibly nested) type starting at the current token.
    private TypeInfo parseType() {
      Token t = expect("type");
      // Is this a primitive type?
      PrimitiveTypeEntry typeEntry =
          PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(t.text);
      if (typeEntry != null && typeEntry.primitiveCategory != PrimitiveCategory.UNKNOWN ) {
        String[] params = parseParams();
        switch (typeEntry.primitiveCategory) {
        case CHAR:
        case VARCHAR:
          if (params == null || params.length == 0) {
            throw new IllegalArgumentException(typeEntry.typeName
                + " type is specified without length: " + typeInfoString);
          }
          int length = 1;
          if (params.length == 1) {
            length = Integer.parseInt(params[0]);
            if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
              BaseCharUtils.validateVarcharParameter(length);
              return TypeInfoFactory.getVarcharTypeInfo(length);
            } else {
              BaseCharUtils.validateCharParameter(length);
              return TypeInfoFactory.getCharTypeInfo(length);
            }
          } else if (params.length > 1) {
            throw new IllegalArgumentException(
                "Type " + typeEntry.typeName+ " only takes one parameter, but " +
                params.length + " is seen");
          }
          // (Every path above returns or throws, so control never falls
          // through into the DECIMAL case despite the missing break.)
        case DECIMAL:
          int precision = HiveDecimal.USER_DEFAULT_PRECISION;
          int scale = HiveDecimal.USER_DEFAULT_SCALE;
          if (params == null || params.length == 0) {
            // It's possible that old metadata still refers to "decimal" as a column type w/o
            // precision/scale. In this case, the default (10,0) is assumed. Thus, do nothing here.
          } else if (params.length == 2) {
            // New metadata always have two parameters.
            precision = Integer.parseInt(params[0]);
            scale = Integer.parseInt(params[1]);
            HiveDecimalUtils.validateParameter(precision, scale);
          } else if (params.length > 2) {
            throw new IllegalArgumentException("Type decimal only takes two parameter, but " +
                params.length + " is seen");
          }
          return TypeInfoFactory.getDecimalTypeInfo(precision, scale);
        default:
          return TypeInfoFactory.getPrimitiveTypeInfo(typeEntry.typeName);
        }
      }
      // Is this a list type?
      if (serdeConstants.LIST_TYPE_NAME.equals(t.text)) {
        expect("<");
        TypeInfo listElementType = parseType();
        expect(">");
        return TypeInfoFactory.getListTypeInfo(listElementType);
      }
      // Is this a map type?
      if (serdeConstants.MAP_TYPE_NAME.equals(t.text)) {
        expect("<");
        TypeInfo mapKeyType = parseType();
        expect(",");
        TypeInfo mapValueType = parseType();
        expect(">");
        return TypeInfoFactory.getMapTypeInfo(mapKeyType, mapValueType);
      }
      // Is this a struct type?
      if (serdeConstants.STRUCT_TYPE_NAME.equals(t.text)) {
        ArrayList<String> fieldNames = new ArrayList<String>();
        ArrayList<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>();
        boolean first = true;
        do {
          if (first) {
            expect("<");
            first = false;
          } else {
            Token separator = expect(">", ",");
            if (separator.text.equals(">")) {
              // end of struct
              break;
            }
          }
          // ">" is accepted here so an empty struct<> parses cleanly.
          Token name = expect("name",">");
          if (name.text.equals(">")) {
            break;
          }
          fieldNames.add(name.text);
          expect(":");
          fieldTypeInfos.add(parseType());
        } while (true);
        return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
      }
      // Is this a union type?
      if (serdeConstants.UNION_TYPE_NAME.equals(t.text)) {
        List<TypeInfo> objectTypeInfos = new ArrayList<TypeInfo>();
        boolean first = true;
        do {
          if (first) {
            expect("<");
            first = false;
          } else {
            Token separator = expect(">", ",");
            if (separator.text.equals(">")) {
              // end of union
              break;
            }
          }
          objectTypeInfos.add(parseType());
        } while (true);
        return TypeInfoFactory.getUnionTypeInfo(objectTypeInfos);
      }
      // expect("type") should have rejected anything else, so reaching here
      // indicates a parser bug rather than bad input.
      throw new RuntimeException("Internal error parsing position "
          + t.position + " of '" + typeInfoString + "'");
    }
    // Splits a primitive type string into its base name and raw parameters
    // without building a TypeInfo (e.g. "varchar(10)" -> "varchar", {"10"}).
    public PrimitiveParts parsePrimitiveParts() {
      PrimitiveParts parts = new PrimitiveParts();
      Token t = expect("type");
      parts.typeName = t.text;
      parts.typeParams = parseParams();
      return parts;
    }
  }
  public static class PrimitiveParts {
    // Base type name, e.g. "varchar" for "varchar(10)".
    public String typeName;
    // Raw parameter strings, e.g. {"10"}; empty when the type has none.
    public String[] typeParams;
  }
/**
* Make some of the TypeInfo parsing available as a utility.
*/
public static PrimitiveParts parsePrimitiveParts(String typeInfoString) {
TypeInfoParser parser = new TypeInfoParser(typeInfoString);
return parser.parsePrimitiveParts();
}
  // Process-wide cache of writable object inspectors, keyed by TypeInfo.
  static ConcurrentHashMap<TypeInfo, ObjectInspector> cachedStandardObjectInspector =
      new ConcurrentHashMap<TypeInfo, ObjectInspector>();
/**
* Returns the standard object inspector that can be used to translate an
* object of that typeInfo to a standard object type.
*/
  public static ObjectInspector getStandardWritableObjectInspectorFromTypeInfo(
      TypeInfo typeInfo) {
    // Check the cache first; on miss, build the inspector recursively and
    // publish it with putIfAbsent so concurrent builders converge on one
    // canonical instance.
    ObjectInspector result = cachedStandardObjectInspector.get(typeInfo);
    if (result == null) {
      switch (typeInfo.getCategory()) {
      case PRIMITIVE: {
        result = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
            (PrimitiveTypeInfo) typeInfo);
        break;
      }
      case LIST: {
        ObjectInspector elementObjectInspector =
            getStandardWritableObjectInspectorFromTypeInfo(((ListTypeInfo) typeInfo)
            .getListElementTypeInfo());
        result = ObjectInspectorFactory
            .getStandardListObjectInspector(elementObjectInspector);
        break;
      }
      case MAP: {
        MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
        ObjectInspector keyObjectInspector =
            getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo());
        ObjectInspector valueObjectInspector =
            getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo());
        result = ObjectInspectorFactory.getStandardMapObjectInspector(
            keyObjectInspector, valueObjectInspector);
        break;
      }
      case STRUCT: {
        StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
        List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
        List<TypeInfo> fieldTypeInfos = structTypeInfo
            .getAllStructFieldTypeInfos();
        List<ObjectInspector> fieldObjectInspectors = new ArrayList<ObjectInspector>(
            fieldTypeInfos.size());
        for (int i = 0; i < fieldTypeInfos.size(); i++) {
          fieldObjectInspectors
              .add(getStandardWritableObjectInspectorFromTypeInfo(fieldTypeInfos
              .get(i)));
        }
        result = ObjectInspectorFactory.getStandardStructObjectInspector(
            fieldNames, fieldObjectInspectors);
        break;
      }
      case UNION: {
        UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
        List<TypeInfo> objectTypeInfos = unionTypeInfo
            .getAllUnionObjectTypeInfos();
        List<ObjectInspector> fieldObjectInspectors =
            new ArrayList<ObjectInspector>(objectTypeInfos.size());
        for (int i = 0; i < objectTypeInfos.size(); i++) {
          fieldObjectInspectors
              .add(getStandardWritableObjectInspectorFromTypeInfo(objectTypeInfos
              .get(i)));
        }
        result = ObjectInspectorFactory.getStandardUnionObjectInspector(
            fieldObjectInspectors);
        break;
      }
      default: {
        // NOTE(review): an unrecognized category leaves result null, and
        // ConcurrentHashMap.putIfAbsent(key, null) below would throw NPE —
        // presumably unreachable for valid TypeInfos; confirm.
        result = null;
      }
      }
      // If another thread published first, prefer its instance.
      ObjectInspector prev =
          cachedStandardObjectInspector.putIfAbsent(typeInfo, result);
      if (prev != null) {
        result = prev;
      }
    }
    return result;
  }
  // Process-wide cache of Java (non-writable) object inspectors, keyed by TypeInfo.
  static ConcurrentHashMap<TypeInfo, ObjectInspector> cachedStandardJavaObjectInspector =
      new ConcurrentHashMap<TypeInfo, ObjectInspector>();
/**
 * Returns the standard object inspector that can be used to translate an
 * object of that typeInfo to a standard object type.
 *
 * <p>Inspectors are memoized in {@code cachedStandardJavaObjectInspector};
 * under a race, the instance published first by {@code putIfAbsent} wins, so
 * every caller observes the same inspector for a given TypeInfo.
 *
 * @param typeInfo the type to build an inspector for
 * @return a Java-object-based standard inspector for {@code typeInfo}
 * @throws RuntimeException if {@code typeInfo} has an unrecognized category
 */
public static ObjectInspector getStandardJavaObjectInspectorFromTypeInfo(
    TypeInfo typeInfo) {
  ObjectInspector result = cachedStandardJavaObjectInspector.get(typeInfo);
  if (result == null) {
    switch (typeInfo.getCategory()) {
    case PRIMITIVE: {
      // NOTE: we use JavaPrimitiveObjectInspector instead of
      // StandardPrimitiveObjectInspector
      result = PrimitiveObjectInspectorFactory
          .getPrimitiveJavaObjectInspector((PrimitiveTypeInfo) typeInfo);
      break;
    }
    case LIST: {
      ObjectInspector elementObjectInspector =
          getStandardJavaObjectInspectorFromTypeInfo(((ListTypeInfo) typeInfo)
          .getListElementTypeInfo());
      result = ObjectInspectorFactory
          .getStandardListObjectInspector(elementObjectInspector);
      break;
    }
    case MAP: {
      MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
      ObjectInspector keyObjectInspector =
          getStandardJavaObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo());
      ObjectInspector valueObjectInspector =
          getStandardJavaObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo());
      result = ObjectInspectorFactory.getStandardMapObjectInspector(
          keyObjectInspector, valueObjectInspector);
      break;
    }
    case STRUCT: {
      StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
      List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
      List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
      List<ObjectInspector> fieldObjectInspectors =
          new ArrayList<ObjectInspector>(fieldTypeInfos.size());
      for (int i = 0; i < fieldTypeInfos.size(); i++) {
        fieldObjectInspectors.add(
            getStandardJavaObjectInspectorFromTypeInfo(fieldTypeInfos.get(i)));
      }
      result = ObjectInspectorFactory.getStandardStructObjectInspector(
          fieldNames, fieldObjectInspectors);
      break;
    }
    case UNION: {
      UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
      List<TypeInfo> objectTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
      List<ObjectInspector> fieldObjectInspectors =
          new ArrayList<ObjectInspector>(objectTypeInfos.size());
      for (int i = 0; i < objectTypeInfos.size(); i++) {
        fieldObjectInspectors.add(
            getStandardJavaObjectInspectorFromTypeInfo(objectTypeInfos.get(i)));
      }
      result = ObjectInspectorFactory.getStandardUnionObjectInspector(
          fieldObjectInspectors);
      break;
    }
    default: {
      // Previously this path left result == null, which made the putIfAbsent
      // below fail with an uninformative NullPointerException (ConcurrentHashMap
      // rejects null values). Fail loudly instead, matching the error handling
      // of getTypeInfoFromObjectInspector.
      throw new RuntimeException("Unknown TypeInfo category: " + typeInfo.getCategory());
    }
    }
    ObjectInspector prev =
        cachedStandardJavaObjectInspector.putIfAbsent(typeInfo, result);
    if (prev != null) {
      result = prev;
    }
  }
  return result;
}
/**
 * Derives the TypeInfo corresponding to an ObjectInspector by recursively
 * walking the inspector structure.
 *
 * @param oi the inspector to translate; may be null
 * @return the equivalent TypeInfo, or null when {@code oi} is null
 * @throws RuntimeException if the inspector category is not recognized
 */
public static TypeInfo getTypeInfoFromObjectInspector(ObjectInspector oi) {
  // Possible future optimization: have inspectors carry their TypeInfo
  // directly instead of rebuilding it here on every call.
  if (oi == null) {
    return null;
  }
  switch (oi.getCategory()) {
  case PRIMITIVE:
    return ((PrimitiveObjectInspector) oi).getTypeInfo();
  case LIST:
    return TypeInfoFactory.getListTypeInfo(
        getTypeInfoFromObjectInspector(
            ((ListObjectInspector) oi).getListElementObjectInspector()));
  case MAP: {
    MapObjectInspector mapInspector = (MapObjectInspector) oi;
    return TypeInfoFactory.getMapTypeInfo(
        getTypeInfoFromObjectInspector(mapInspector.getMapKeyObjectInspector()),
        getTypeInfoFromObjectInspector(mapInspector.getMapValueObjectInspector()));
  }
  case STRUCT: {
    StructObjectInspector structInspector = (StructObjectInspector) oi;
    List<? extends StructField> fieldRefs = structInspector.getAllStructFieldRefs();
    List<String> names = new ArrayList<String>(fieldRefs.size());
    List<TypeInfo> types = new ArrayList<TypeInfo>(fieldRefs.size());
    for (StructField fieldRef : fieldRefs) {
      names.add(fieldRef.getFieldName());
      types.add(getTypeInfoFromObjectInspector(fieldRef.getFieldObjectInspector()));
    }
    return TypeInfoFactory.getStructTypeInfo(names, types);
  }
  case UNION: {
    UnionObjectInspector unionInspector = (UnionObjectInspector) oi;
    List<TypeInfo> alternatives = new ArrayList<TypeInfo>();
    for (ObjectInspector member : unionInspector.getObjectInspectors()) {
      alternatives.add(getTypeInfoFromObjectInspector(member));
    }
    return TypeInfoFactory.getUnionTypeInfo(alternatives);
  }
  default:
    throw new RuntimeException("Unknown ObjectInspector category!");
  }
}
/**
 * Collects the TypeInfo of every field of a struct inspector, in field order.
 *
 * @param structObjectInspector the struct inspector to read fields from
 * @return one TypeInfo per struct field
 */
public static ArrayList<TypeInfo> typeInfosFromStructObjectInspector(
    StructObjectInspector structObjectInspector) {
  List<? extends StructField> structFields = structObjectInspector.getAllStructFieldRefs();
  ArrayList<TypeInfo> fieldTypes = new ArrayList<TypeInfo>(structFields.size());
  for (StructField structField : structFields) {
    // Round-trip through the type name string to obtain a canonical TypeInfo.
    fieldTypes.add(TypeInfoUtils.getTypeInfoFromTypeString(
        structField.getFieldObjectInspector().getTypeName()));
  }
  return fieldTypes;
}
/**
 * Parses each type name string into its TypeInfo, preserving input order.
 *
 * @param typeNames the type name strings to parse
 * @return the parsed TypeInfos, one per input name
 */
public static ArrayList<TypeInfo> typeInfosFromTypeNames(List<String> typeNames) {
  ArrayList<TypeInfo> parsed = new ArrayList<TypeInfo>(typeNames.size());
  for (String typeName : typeNames) {
    parsed.add(TypeInfoUtils.getTypeInfoFromTypeString(typeName));
  }
  return parsed;
}
/**
 * Parses a type string (possibly describing several types) into the list of
 * TypeInfos it denotes.
 *
 * @param typeString the type string to parse
 * @return all TypeInfos encoded in the string
 */
public static ArrayList<TypeInfo> getTypeInfosFromTypeString(String typeString) {
  return new TypeInfoParser(typeString).parseTypeInfos();
}
/**
 * Renders each TypeInfo in the list as its string representation.
 *
 * @param typeInfos the types to render; may be null
 * @return a list of type strings in the same order, or null when the input is null
 */
public static List<String> getTypeStringsFromTypeInfo(List<TypeInfo> typeInfos) {
  if (typeInfos == null) {
    return null;
  }
  List<String> typeStrings = new ArrayList<>(typeInfos.size());
  for (int i = 0; i < typeInfos.size(); i++) {
    typeStrings.add(typeInfos.get(i).toString());
  }
  return typeStrings;
}
/**
 * Parses a type string and returns the first TypeInfo it denotes.
 *
 * @param typeString the type string to parse
 * @return the first parsed TypeInfo
 */
public static TypeInfo getTypeInfoFromTypeString(String typeString) {
  return new TypeInfoParser(typeString).parseTypeInfos().get(0);
}
/**
 * Given two types, determine whether conversion needs to occur to compare the two types.
 * This is needed for cases like varchar, where the TypeInfo for varchar(10) != varchar(5),
 * but there would be no need to have to convert to compare these values.
 *
 * @param typeA first type to compare
 * @param typeB second type to compare
 * @return true if a conversion step is required before comparing values
 */
public static boolean isConversionRequiredForComparison(TypeInfo typeA, TypeInfo typeB) {
  // No conversion is needed when the types are identical, or when both are
  // qualified primitives of the same category (e.g. varchars of differing length).
  return !typeA.equals(typeB) && !TypeInfoUtils.doPrimitiveCategoriesMatch(typeA, typeB);
}
/**
 * Return the character length of the type.
 *
 * @param typeInfo primitive type to inspect
 * @return the maximum character length for string/char/varchar types, 0 for
 *     all other primitive categories
 */
public static int getCharacterLengthForType(PrimitiveTypeInfo typeInfo) {
  switch (typeInfo.getPrimitiveCategory()) {
  case STRING:
    // Unbounded strings are reported as the varchar maximum.
    return HiveVarchar.MAX_VARCHAR_LENGTH;
  case CHAR:
  case VARCHAR:
    // Both char and varchar carry an explicit declared length.
    return ((BaseCharTypeInfo) typeInfo).getLength();
  default:
    return 0;
  }
}
// Registers a numeric primitive category together with its widening "level";
// implicitConvertible treats conversion to an equal-or-higher level as implicit.
// NOTE(review): numericTypeList/numericTypes appear to be plain static
// collections — presumably populated once during class initialization; confirm
// before invoking this from multiple threads.
public static void registerNumericType(PrimitiveCategory primitiveCategory, int level) {
numericTypeList.add(primitiveCategory);
numericTypes.put(primitiveCategory, level);
}
/**
 * Whether a value of primitive category {@code from} can be implicitly
 * converted to primitive category {@code to}.
 */
public static boolean implicitConvertible(PrimitiveCategory from, PrimitiveCategory to) {
if (from == to) {
return true;
}
PrimitiveGrouping fromPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(from);
PrimitiveGrouping toPg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(to);
// Allow implicit String to Double conversion
if (fromPg == PrimitiveGrouping.STRING_GROUP && to == PrimitiveCategory.DOUBLE) {
return true;
}
// Allow implicit String to Decimal conversion
if (fromPg == PrimitiveGrouping.STRING_GROUP && to == PrimitiveCategory.DECIMAL) {
return true;
}
// Void can be converted to any type
if (from == PrimitiveCategory.VOID) {
return true;
}
// Allow implicit conversion from the date group to the string group.
// (The original comment here said "String to Date", but the check below is
// date -> string.)
if (fromPg == PrimitiveGrouping.DATE_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
return true;
}
// Allow implicit Numeric to String conversion
if (fromPg == PrimitiveGrouping.NUMERIC_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
return true;
}
// Allow implicit String to varchar conversion, and vice versa
if (fromPg == PrimitiveGrouping.STRING_GROUP && toPg == PrimitiveGrouping.STRING_GROUP) {
return true;
}
// Allow implicit conversion upward along the registered numeric levels
// (Byte -> Integer -> Long -> Float -> Double; Decimal -> String): conversion
// is implicit only when the target's level is >= the source's level.
Integer f = numericTypes.get(from);
Integer t = numericTypes.get(to);
// Either side unregistered as numeric -> no implicit conversion.
if (f == null || t == null) {
return false;
}
// Narrowing (higher level to lower level) is never implicit.
if (f.intValue() > t.intValue()) {
return false;
}
return true;
}
/**
 * Returns whether it is possible to implicitly convert an object of Class
 * from to Class to.
 *
 * <p>Primitives are compared by PrimitiveCategory rather than full TypeInfo,
 * so differently-qualified variants of one type (varchar, decimal) are still
 * treated as equivalent for conversion purposes.
 */
public static boolean implicitConvertible(TypeInfo from, TypeInfo to) {
  if (from.equals(to)) {
    return true;
  }
  boolean bothPrimitive =
      from.getCategory() == Category.PRIMITIVE && to.getCategory() == Category.PRIMITIVE;
  if (!bothPrimitive) {
    // Non-primitive categories only convert when the types are identical.
    return false;
  }
  PrimitiveCategory fromCategory = ((PrimitiveTypeInfo) from).getPrimitiveCategory();
  PrimitiveCategory toCategory = ((PrimitiveTypeInfo) to).getPrimitiveCategory();
  return implicitConvertible(fromCategory, toCategory);
}
}
|
googleapis/google-cloud-java | 35,289 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateCustomDimensionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for UpdateCustomDimension RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateCustomDimensionRequest}
*/
public final class UpdateCustomDimensionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateCustomDimensionRequest)
UpdateCustomDimensionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateCustomDimensionRequest.newBuilder() to construct.
private UpdateCustomDimensionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateCustomDimensionRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateCustomDimensionRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateCustomDimensionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateCustomDimensionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.class,
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.Builder.class);
}
private int bitField0_;
public static final int CUSTOM_DIMENSION_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1alpha.CustomDimension customDimension_;
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*
* @return Whether the customDimension field is set.
*/
@java.lang.Override
public boolean hasCustomDimension() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*
* @return The customDimension.
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.CustomDimension getCustomDimension() {
return customDimension_ == null
? com.google.analytics.admin.v1alpha.CustomDimension.getDefaultInstance()
: customDimension_;
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.CustomDimensionOrBuilder getCustomDimensionOrBuilder() {
return customDimension_ == null
? com.google.analytics.admin.v1alpha.CustomDimension.getDefaultInstance()
: customDimension_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getCustomDimension());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCustomDimension());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest)) {
return super.equals(obj);
}
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest other =
(com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest) obj;
if (hasCustomDimension() != other.hasCustomDimension()) return false;
if (hasCustomDimension()) {
if (!getCustomDimension().equals(other.getCustomDimension())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCustomDimension()) {
hash = (37 * hash) + CUSTOM_DIMENSION_FIELD_NUMBER;
hash = (53 * hash) + getCustomDimension().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateCustomDimension RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateCustomDimensionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateCustomDimensionRequest)
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateCustomDimensionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateCustomDimensionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.class,
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCustomDimensionFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
customDimension_ = null;
if (customDimensionBuilder_ != null) {
customDimensionBuilder_.dispose();
customDimensionBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_UpdateCustomDimensionRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest
getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest build() {
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest buildPartial() {
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest result =
new com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.customDimension_ =
customDimensionBuilder_ == null ? customDimension_ : customDimensionBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest) {
return mergeFrom((com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest other) {
if (other
== com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest.getDefaultInstance())
return this;
if (other.hasCustomDimension()) {
mergeCustomDimension(other.getCustomDimension());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getCustomDimensionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.analytics.admin.v1alpha.CustomDimension customDimension_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.CustomDimension,
com.google.analytics.admin.v1alpha.CustomDimension.Builder,
com.google.analytics.admin.v1alpha.CustomDimensionOrBuilder>
customDimensionBuilder_;
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*
* @return Whether the customDimension field is set.
*/
public boolean hasCustomDimension() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*
* @return The customDimension.
*/
public com.google.analytics.admin.v1alpha.CustomDimension getCustomDimension() {
if (customDimensionBuilder_ == null) {
return customDimension_ == null
? com.google.analytics.admin.v1alpha.CustomDimension.getDefaultInstance()
: customDimension_;
} else {
return customDimensionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
public Builder setCustomDimension(com.google.analytics.admin.v1alpha.CustomDimension value) {
if (customDimensionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
customDimension_ = value;
} else {
customDimensionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
public Builder setCustomDimension(
com.google.analytics.admin.v1alpha.CustomDimension.Builder builderForValue) {
if (customDimensionBuilder_ == null) {
customDimension_ = builderForValue.build();
} else {
customDimensionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
public Builder mergeCustomDimension(com.google.analytics.admin.v1alpha.CustomDimension value) {
if (customDimensionBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& customDimension_ != null
&& customDimension_
!= com.google.analytics.admin.v1alpha.CustomDimension.getDefaultInstance()) {
getCustomDimensionBuilder().mergeFrom(value);
} else {
customDimension_ = value;
}
} else {
customDimensionBuilder_.mergeFrom(value);
}
if (customDimension_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
public Builder clearCustomDimension() {
bitField0_ = (bitField0_ & ~0x00000001);
customDimension_ = null;
if (customDimensionBuilder_ != null) {
customDimensionBuilder_.dispose();
customDimensionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
public com.google.analytics.admin.v1alpha.CustomDimension.Builder getCustomDimensionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCustomDimensionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
public com.google.analytics.admin.v1alpha.CustomDimensionOrBuilder
getCustomDimensionOrBuilder() {
if (customDimensionBuilder_ != null) {
return customDimensionBuilder_.getMessageOrBuilder();
} else {
return customDimension_ == null
? com.google.analytics.admin.v1alpha.CustomDimension.getDefaultInstance()
: customDimension_;
}
}
/**
*
*
* <pre>
* The CustomDimension to update
* </pre>
*
* <code>.google.analytics.admin.v1alpha.CustomDimension custom_dimension = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.CustomDimension,
com.google.analytics.admin.v1alpha.CustomDimension.Builder,
com.google.analytics.admin.v1alpha.CustomDimensionOrBuilder>
getCustomDimensionFieldBuilder() {
if (customDimensionBuilder_ == null) {
customDimensionBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.CustomDimension,
com.google.analytics.admin.v1alpha.CustomDimension.Builder,
com.google.analytics.admin.v1alpha.CustomDimensionOrBuilder>(
getCustomDimension(), getParentForChildren(), isClean());
customDimension_ = null;
}
return customDimensionBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Omitted fields will not be
* updated. To replace the entire entity, use one path with the string "*" to
* match all fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        // No builder yet: serve the stored message, substituting the default when unset.
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      // Mark the field as set and notify the parent message of the change.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        // The argument builder is built eagerly and never retained.
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          // An existing non-default value is merged field-by-field with the argument.
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          // Unset or default-valued: simply adopt the argument.
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      // Clear the presence bit and both storage forms (message and builder).
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable builder implies the field is (or will be) set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      // Read-only view: prefer the live builder when one exists, otherwise the stored
      // message (or the default instance when unset).
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to be updated. Omitted fields will not be
     * updated. To replace the entire entity, use one path with the string "*" to
     * match all fields.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    // Lazily creates the single-field builder for update_mask; after this call the
    // builder owns the field state and updateMask_ is cleared.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Unknown-field handling is inherited unchanged from the generated base Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Merges unrecognized wire-format fields; behavior inherited from the base Builder.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateCustomDimensionRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateCustomDimensionRequest)
  // Singleton default instance returned by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest();
  }
  // Static accessor for the shared immutable default instance.
  public static com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure the partially parsed message is attached to the
  // thrown exception so callers can inspect what was successfully read.
  private static final com.google.protobuf.Parser<UpdateCustomDimensionRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateCustomDimensionRequest>() {
        @java.lang.Override
        public UpdateCustomDimensionRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failures are wrapped in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the message parser.
  public static com.google.protobuf.Parser<UpdateCustomDimensionRequest> parser() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; same parser as parser().
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateCustomDimensionRequest> getParserForType() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; same singleton as getDefaultInstance().
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.UpdateCustomDimensionRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/guava | 35,574 | android/guava/src/com/google/common/collect/FluentIterable.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.InlineMe;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.SortedSet;
import java.util.stream.Stream;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
/**
* An expanded {@code Iterable} API, providing functionality similar to Java 8's powerful <a href=
* "https://docs.oracle.com/javase/8/docs/api/java/util/stream/package-summary.html#package.description"
* >streams library</a> in a slightly different way.
*
* <p>The following types of methods are provided:
*
* <ul>
* <li>chaining methods which return a new {@code FluentIterable} based in some way on the
* contents of the current one (for example {@link #transform})
* <li>element extraction methods which facilitate the retrieval of certain elements (for example
* {@link #last})
* <li>query methods which answer questions about the {@code FluentIterable}'s contents (for
* example {@link #anyMatch})
* <li>conversion methods which copy the {@code FluentIterable}'s contents into a new collection
* or array (for example {@link #toList})
* </ul>
*
* <p>Several lesser-used features are currently available only as static methods on the {@link
* Iterables} class.
*
* <p><a id="streams"></a>
*
* <h3>Comparison to streams</h3>
*
* <p>Starting with Java 8, the core Java class libraries provide a new "Streams" library (in {@code
* java.util.stream}), which is similar to {@code FluentIterable} but generally more powerful. Key
* differences include:
*
* <ul>
* <li>A stream is <i>single-use</i>; it becomes invalid as soon as any "terminal operation" such
* as {@code findFirst()} or {@code iterator()} is invoked. (Even though {@code Stream}
* contains all the right method <i>signatures</i> to implement {@link Iterable}, it does not
* actually do so, to avoid implying repeat-iterability.) {@code FluentIterable}, on the other
* hand, is multiple-use, and does implement {@link Iterable}.
* <li>Streams offer many features not found here, including {@code min/max}, {@code distinct},
* {@code reduce}, {@code sorted}, the very powerful {@code collect}, and built-in support for
* parallelizing stream operations.
* <li>{@code FluentIterable} contains several features not available on {@code Stream}, which are
* noted in the method descriptions below.
* <li>Streams include primitive-specialized variants such as {@code IntStream}, the use of which
* is strongly recommended.
* <li>Streams are standard Java, not requiring a third-party dependency (but requiring <a
* href="https://developer.android.com/studio/write/java8-support#library-desugaring">library
* desugaring</a> or <a
* href="https://developer.android.com/reference/java/util/stream/Stream">API Level 24</a>
* under Android).
* </ul>
*
* <h3>Example</h3>
*
* <p>Here is an example that accepts a list from a database call, filters it based on a predicate,
* transforms it by invoking {@code toString()} on each element, and returns the first 10 elements
* as a {@code List}:
*
* {@snippet :
* ImmutableList<String> results =
* FluentIterable.from(database.getClientList())
* .filter(Client::isActiveInLastMonth)
* .transform(Object::toString)
* .limit(10)
* .toList();
* }
*
* The approximate stream equivalent is:
*
* {@snippet :
* List<String> results =
* database.getClientList()
* .stream()
* .filter(Client::isActiveInLastMonth)
* .map(Object::toString)
* .limit(10)
* .collect(Collectors.toList());
* }
*
* @author Marcin Mikosik
* @since 12.0
*/
@GwtCompatible
public abstract class FluentIterable<E extends @Nullable Object> implements Iterable<E> {
  // We store 'iterable' and use it instead of 'this' to allow Iterables to perform instanceof
  // checks on the _original_ iterable when FluentIterable.from is used.
  // To avoid a self retain cycle under j2objc, we store Optional.absent() instead of
  // Optional.of(this). To access the delegate iterable, call #getDelegate(), which converts
  // absent() back to 'this'.
  private final Optional<Iterable<E>> iterableDelegate;
  /** Constructor for use by subclasses. */
  protected FluentIterable() {
    // Absent delegate means "this instance is its own delegate"; see getDelegate().
    this.iterableDelegate = Optional.absent();
  }
  // Wraps an existing iterable as the delegate (used by the anonymous subclasses the
  // static factories create).
  FluentIterable(Iterable<E> iterable) {
    this.iterableDelegate = Optional.of(iterable);
  }
  // Returns the iterable to operate on: the wrapped one if present, otherwise this instance.
  private Iterable<E> getDelegate() {
    return iterableDelegate.or(this);
  }
/**
* Returns a fluent iterable that wraps {@code iterable}, or {@code iterable} itself if it is
* already a {@code FluentIterable}.
*
* <p><b>{@code Stream} equivalent:</b> {@link Collection#stream} if {@code iterable} is a {@link
* Collection}; {@link Streams#stream(Iterable)} otherwise.
*/
public static <E extends @Nullable Object> FluentIterable<E> from(Iterable<E> iterable) {
return (iterable instanceof FluentIterable)
? (FluentIterable<E>) iterable
: new FluentIterable<E>(iterable) {
@Override
public Iterator<E> iterator() {
return iterable.iterator();
}
};
}
/**
* Returns a fluent iterable containing {@code elements} in the specified order.
*
* <p>The returned iterable is an unmodifiable view of the input array.
*
* <p><b>{@code Stream} equivalent:</b> {@link java.util.stream.Stream#of(Object[])
* Stream.of(T...)}.
*
* @since 20.0 (since 18.0 as an overload of {@code of})
*/
public static <E extends @Nullable Object> FluentIterable<E> from(E[] elements) {
return from(Arrays.asList(elements));
}
  /**
   * Construct a fluent iterable from another fluent iterable. This is obviously never necessary,
   * but is intended to help call out cases where one migration from {@code Iterable} to {@code
   * FluentIterable} has obviated the need to explicitly convert to a {@code FluentIterable}.
   *
   * @deprecated instances of {@code FluentIterable} don't need to be converted to {@code
   *     FluentIterable}
   */
  @Deprecated
  @InlineMe(
      replacement = "checkNotNull(iterable)",
      staticImports = {"com.google.common.base.Preconditions.checkNotNull"})
  public static <E extends @Nullable Object> FluentIterable<E> from(FluentIterable<E> iterable) {
    // checkNotNull both validates and returns the argument, matching the @InlineMe replacement.
    return checkNotNull(iterable);
  }
  /**
   * Returns a fluent iterable that combines two iterables. The returned iterable has an iterator
   * that traverses the elements in {@code a}, followed by the elements in {@code b}. The source
   * iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
   * iterator supports it.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#concat}.
   *
   * @since 20.0
   */
  public static <T extends @Nullable Object> FluentIterable<T> concat(
      Iterable<? extends T> a, Iterable<? extends T> b) {
    // Fixed arity: the varargs array allocated here never escapes to the caller, so no
    // defensive copy is needed.
    return concatNoDefensiveCopy(a, b);
  }
  /**
   * Returns a fluent iterable that combines three iterables. The returned iterable has an iterator
   * that traverses the elements in {@code a}, followed by the elements in {@code b}, followed by
   * the elements in {@code c}. The source iterators are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
   * iterator supports it.
   *
   * <p><b>{@code Stream} equivalent:</b> use nested calls to {@link Stream#concat}, or see the
   * advice in {@link #concat(Iterable...)}.
   *
   * @since 20.0
   */
  public static <T extends @Nullable Object> FluentIterable<T> concat(
      Iterable<? extends T> a, Iterable<? extends T> b, Iterable<? extends T> c) {
    // Fixed arity: the implicit varargs array never escapes, so no defensive copy is needed.
    return concatNoDefensiveCopy(a, b, c);
  }
  /**
   * Returns a fluent iterable that combines four iterables. The returned iterable has an iterator
   * that traverses the elements in {@code a}, followed by the elements in {@code b}, followed by
   * the elements in {@code c}, followed by the elements in {@code d}. The source iterators are not
   * polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
   * iterator supports it.
   *
   * <p><b>{@code Stream} equivalent:</b> use nested calls to {@link Stream#concat}, or see the
   * advice in {@link #concat(Iterable...)}.
   *
   * @since 20.0
   */
  public static <T extends @Nullable Object> FluentIterable<T> concat(
      Iterable<? extends T> a,
      Iterable<? extends T> b,
      Iterable<? extends T> c,
      Iterable<? extends T> d) {
    // Fixed arity: the implicit varargs array never escapes, so no defensive copy is needed.
    return concatNoDefensiveCopy(a, b, c, d);
  }
/**
* Returns a fluent iterable that combines several iterables. The returned iterable has an
* iterator that traverses the elements of each iterable in {@code inputs}. The input iterators
* are not polled until necessary.
*
* <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
* iterator supports it.
*
* <p><b>{@code Stream} equivalent:</b> to concatenate an arbitrary number of streams, use {@code
* Stream.of(stream1, stream2, ...).flatMap(s -> s)}. If the sources are iterables, use {@code
* Stream.of(iter1, iter2, ...).flatMap(Streams::stream)}.
*
* @throws NullPointerException if any of the provided iterables is {@code null}
* @since 20.0
*/
@SafeVarargs
public static <T extends @Nullable Object> FluentIterable<T> concat(
Iterable<? extends T>... inputs) {
return concatNoDefensiveCopy(Arrays.copyOf(inputs, inputs.length));
}
  /**
   * Returns a fluent iterable that combines several iterables. The returned iterable has an
   * iterator that traverses the elements of each iterable in {@code inputs}. The input iterators
   * are not polled until necessary.
   *
   * <p>The returned iterable's iterator supports {@code remove()} when the corresponding input
   * iterator supports it. The methods of the returned iterable may throw {@code
   * NullPointerException} if any of the input iterators is {@code null}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code streamOfStreams.flatMap(s -> s)} or {@code
   * streamOfIterables.flatMap(Streams::stream)}. (See {@link Streams#stream}.)
   *
   * @since 20.0
   */
  public static <T extends @Nullable Object> FluentIterable<T> concat(
      Iterable<? extends T> inputs) {
    checkNotNull(inputs);
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        // Each inner iterable is converted to an iterator only when traversal reaches it.
        return Iterators.concat(Iterators.transform(inputs.iterator(), Iterable::iterator));
      }
    };
  }
  /** Concatenates a varargs array of iterables without making a defensive copy of the array. */
  private static <T extends @Nullable Object> FluentIterable<T> concatNoDefensiveCopy(
      Iterable<? extends T>... inputs) {
    // Fail fast on null inputs; the anonymous iterable below assumes non-null entries.
    for (Iterable<? extends T> input : inputs) {
      checkNotNull(input);
    }
    return new FluentIterable<T>() {
      @Override
      public Iterator<T> iterator() {
        return Iterators.concat(
            /* lazily generate the iterators on each input only as needed */
            new AbstractIndexedListIterator<Iterator<? extends T>>(inputs.length) {
              @Override
              public Iterator<? extends T> get(int i) {
                return inputs[i].iterator();
              }
            });
      }
    };
  }
  /**
   * Returns a fluent iterable containing no elements.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#empty}.
   *
   * @since 20.0
   */
  @SuppressWarnings("EmptyList") // ImmutableList doesn't support nullable element types
  public static <E extends @Nullable Object> FluentIterable<E> of() {
    // Collections.emptyList (not ImmutableList.of) so E may be a nullable type.
    return FluentIterable.from(Collections.emptyList());
  }
  /**
   * Returns a fluent iterable containing the specified elements in order.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link java.util.stream.Stream#of(Object[])
   * Stream.of(T...)}.
   *
   * @since 20.0
   */
  public static <E extends @Nullable Object> FluentIterable<E> of(
      @ParametricNullness E element, E... elements) {
    // Lists.asList prepends 'element' to a view of 'elements' without copying the array.
    return from(Lists.asList(element, elements));
  }
  /**
   * Returns a string representation of this fluent iterable, with the format {@code [e1, e2, ...,
   * en]}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code stream.collect(Collectors.joining(", ", "[", "]"))}
   * or (less efficiently) {@code stream.collect(Collectors.toList()).toString()}.
   */
  @Override
  public String toString() {
    // Formatting is delegated to Iterables.toString on the underlying iterable.
    return Iterables.toString(getDelegate());
  }
  /**
   * Returns the number of elements in this fluent iterable.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#count}.
   */
  public final int size() {
    // Counting is delegated to Iterables.size on the underlying iterable.
    return Iterables.size(getDelegate());
  }
  /**
   * Returns {@code true} if this fluent iterable contains any object for which {@code
   * equals(target)} is true.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code stream.anyMatch(Predicate.isEqual(target))}.
   */
  public final boolean contains(@Nullable Object target) {
    // target may be null, per the @Nullable annotation; delegated to Iterables.contains.
    return Iterables.contains(getDelegate(), target);
  }
  /**
   * Returns a fluent iterable whose {@code Iterator} cycles indefinitely over the elements of this
   * fluent iterable.
   *
   * <p>That iterator supports {@code remove()} if {@code iterable.iterator()} does. After {@code
   * remove()} is called, subsequent cycles omit the removed element, which is no longer in this
   * fluent iterable. The iterator's {@code hasNext()} method returns {@code true} until this fluent
   * iterable is empty.
   *
   * <p><b>Warning:</b> Typical uses of the resulting iterator may produce an infinite loop. You
   * should use an explicit {@code break} or be certain that you will eventually remove all the
   * elements.
   *
   * <p><b>{@code Stream} equivalent:</b> if the source iterable has only a single element {@code
   * e}, use {@code Stream.generate(() -> e)}. Otherwise, collect your stream into a collection and
   * use {@code Stream.generate(() -> collection).flatMap(Collection::stream)}.
   */
  public final FluentIterable<E> cycle() {
    // Cycling behavior (including remove() support) is implemented by Iterables.cycle.
    return from(Iterables.cycle(getDelegate()));
  }
  /**
   * Returns a fluent iterable whose iterators traverse first the elements of this fluent iterable,
   * followed by those of {@code other}. The iterators are not polled until necessary.
   *
   * <p>The returned iterable's {@code Iterator} supports {@code remove()} when the corresponding
   * {@code Iterator} supports it.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#concat}.
   *
   * @since 18.0
   */
  public final FluentIterable<E> append(Iterable<? extends E> other) {
    // Reuses the two-argument concat factory with this iterable's delegate first.
    return FluentIterable.concat(getDelegate(), other);
  }
  /**
   * Returns a fluent iterable whose iterators traverse first the elements of this fluent iterable,
   * followed by {@code elements}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code Stream.concat(thisStream, Stream.of(elements))}.
   *
   * @since 18.0
   */
  public final FluentIterable<E> append(E... elements) {
    // The varargs array is wrapped in a fixed-size list view; elements are not copied.
    return FluentIterable.concat(getDelegate(), Arrays.asList(elements));
  }
  /**
   * Returns the elements from this fluent iterable that satisfy a predicate. The resulting fluent
   * iterable's iterator does not support {@code remove()}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#filter} (same).
   */
  public final FluentIterable<E> filter(Predicate<? super E> predicate) {
    // Filtering is delegated to Iterables.filter over the underlying iterable.
    return from(Iterables.filter(getDelegate(), predicate));
  }
  /**
   * Returns the elements from this fluent iterable that are instances of class {@code type}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(type::isInstance).map(type::cast)}.
   * This does perform a little more work than necessary, so another option is to insert an
   * unchecked cast at some later point:
   *
   * {@snippet :
   * @SuppressWarnings("unchecked") // safe because of ::isInstance check
   * ImmutableList<NewType> result =
   *     (ImmutableList) stream.filter(NewType.class::isInstance).collect(toImmutableList());
   * }
   */
  @GwtIncompatible // Class.isInstance
  public final <T> FluentIterable<T> filter(Class<T> type) {
    // Relies on runtime Class.isInstance checks, hence the @GwtIncompatible marker above.
    return from(Iterables.filter(getDelegate(), type));
  }
  /**
   * Returns {@code true} if any element in this fluent iterable satisfies the predicate.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#anyMatch} (same).
   */
  public final boolean anyMatch(Predicate<? super E> predicate) {
    // Existence test is delegated to Iterables.any.
    return Iterables.any(getDelegate(), predicate);
  }
  /**
   * Returns {@code true} if every element in this fluent iterable satisfies the predicate. If this
   * fluent iterable is empty, {@code true} is returned.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#allMatch} (same).
   */
  public final boolean allMatch(Predicate<? super E> predicate) {
    // Vacuously true on an empty iterable, per the contract above; delegated to Iterables.all.
    return Iterables.all(getDelegate(), predicate);
  }
  /**
   * Returns an {@link Optional} containing the first element in this fluent iterable that satisfies
   * the given predicate, if such an element exists.
   *
   * <p><b>Warning:</b> avoid using a {@code predicate} that matches {@code null}. If {@code null}
   * is matched in this fluent iterable, a {@link NullPointerException} will be thrown.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code stream.filter(predicate).findFirst()}.
   */
  public final Optional<@NonNull E> firstMatch(Predicate<? super E> predicate) {
    // Unsafe, but we can't do much about it now.
    // The @NonNull cast is unsound when the predicate matches a null element — that case
    // throws NullPointerException, as documented in the warning above.
    return Iterables.<@NonNull E>tryFind((Iterable<@NonNull E>) getDelegate(), predicate);
  }
  /**
   * Returns a fluent iterable that applies {@code function} to each element of this fluent
   * iterable.
   *
   * <p>The returned fluent iterable's iterator supports {@code remove()} if this iterable's
   * iterator does. After a successful {@code remove()} call, this fluent iterable no longer
   * contains the corresponding element.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#map}.
   */
  public final <T extends @Nullable Object> FluentIterable<T> transform(
      Function<? super E, T> function) {
    // Element mapping is delegated to Iterables.transform.
    return from(Iterables.transform(getDelegate(), function));
  }
  /**
   * Applies {@code function} to each element of this fluent iterable and returns a fluent iterable
   * with the concatenated combination of results. {@code function} returns an Iterable of results.
   *
   * <p>The returned fluent iterable's iterator supports {@code remove()} if this function-returned
   * iterables' iterator does. After a successful {@code remove()} call, the returned fluent
   * iterable no longer contains the corresponding element.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#flatMap} (using a function that produces
   * streams, not iterables).
   *
   * @since 13.0 (required {@code Function<E, Iterable<T>>} until 14.0)
   */
  public <T extends @Nullable Object> FluentIterable<T> transformAndConcat(
      Function<? super E, ? extends Iterable<? extends T>> function) {
    // Implemented as a map (transform) followed by a flattening concat.
    return FluentIterable.concat(transform(function));
  }
/**
* Returns an {@link Optional} containing the first element in this fluent iterable. If the
* iterable is empty, {@code Optional.absent()} is returned.
*
* <p><b>{@code Stream} equivalent:</b> if the goal is to obtain any element, {@link
* Stream#findAny}; if it must specifically be the <i>first</i> element, {@code Stream#findFirst}.
*
* @throws NullPointerException if the first element is null; if this is a possibility, use {@code
* iterator().next()} or {@link Iterables#getFirst} instead.
*/
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final Optional<@NonNull E> first() {
Iterator<E> iterator = getDelegate().iterator();
return iterator.hasNext() ? Optional.of(iterator.next()) : Optional.absent();
}
  /**
   * Returns an {@link Optional} containing the last element in this fluent iterable. If the
   * iterable is empty, {@code Optional.absent()} is returned. If the underlying {@code iterable} is
   * a {@link List} with {@link java.util.RandomAccess} support, then this operation is guaranteed
   * to be {@code O(1)}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@code stream.reduce((a, b) -> b)}.
   *
   * @throws NullPointerException if the last element is null; if this is a possibility, use {@link
   *     Iterables#getLast} instead.
   */
  @SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
  public final Optional<@NonNull E> last() {
    // Iterables#getLast was inlined here so we don't have to throw/catch a NSEE
    // TODO(kevinb): Support a concurrently modified collection?
    Iterable<E> iterable = getDelegate();
    if (iterable instanceof List) {
      // List fast path: read the final index directly without iterating.
      List<E> list = (List<E>) iterable;
      if (list.isEmpty()) {
        return Optional.absent();
      }
      return Optional.of(list.get(list.size() - 1));
    }
    Iterator<E> iterator = iterable.iterator();
    if (!iterator.hasNext()) {
      return Optional.absent();
    }
    /*
     * TODO(kevinb): consider whether this "optimization" is worthwhile. Users with SortedSets tend
     * to know they are SortedSets and probably would not call this method.
     */
    if (iterable instanceof SortedSet) {
      // SortedSet fast path: its own last() returns the maximum element.
      SortedSet<E> sortedSet = (SortedSet<E>) iterable;
      return Optional.of(sortedSet.last());
    }
    // General case: walk the iterator to its final element.
    while (true) {
      E current = iterator.next();
      if (!iterator.hasNext()) {
        return Optional.of(current);
      }
    }
  }
  /**
   * Returns a view of this fluent iterable that skips its first {@code numberToSkip} elements. If
   * this fluent iterable contains fewer than {@code numberToSkip} elements, the returned fluent
   * iterable skips all of its elements.
   *
   * <p>Modifications to this fluent iterable before a call to {@code iterator()} are reflected in
   * the returned fluent iterable. That is, the iterator skips the first {@code numberToSkip}
   * elements that exist when the iterator is created, not when {@code skip()} is called.
   *
   * <p>The returned fluent iterable's iterator supports {@code remove()} if the {@code Iterator} of
   * this fluent iterable supports it. Note that it is <i>not</i> possible to delete the last
   * skipped element by immediately calling {@code remove()} on the returned fluent iterable's
   * iterator, as the {@code Iterator} contract states that a call to {@code remove()} before a
   * call to {@code next()} will throw an {@link IllegalStateException}.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#skip} (same).
   */
  public final FluentIterable<E> skip(int numberToSkip) {
    // The skipping view is implemented by Iterables.skip.
    return from(Iterables.skip(getDelegate(), numberToSkip));
  }
  /**
   * Creates a fluent iterable with the first {@code size} elements of this fluent iterable. If this
   * fluent iterable does not contain that many elements, the returned fluent iterable will have the
   * same behavior as this fluent iterable. The returned fluent iterable's iterator supports {@code
   * remove()} if this fluent iterable's iterator does.
   *
   * <p><b>{@code Stream} equivalent:</b> {@link Stream#limit} (same).
   *
   * @param maxSize the maximum number of elements in the returned fluent iterable
   * @throws IllegalArgumentException if {@code size} is negative
   */
  public final FluentIterable<E> limit(int maxSize) {
    // The truncating view (and the negative-size check) is implemented by Iterables.limit.
    return from(Iterables.limit(getDelegate(), maxSize));
  }
/**
* Determines whether this fluent iterable is empty.
*
* <p><b>{@code Stream} equivalent:</b> {@code !stream.findAny().isPresent()}.
*/
public final boolean isEmpty() {
return !getDelegate().iterator().hasNext();
}
  /**
   * Returns an {@code ImmutableList} containing all of the elements from this fluent iterable in
   * proper sequence.
   *
   * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableList#toImmutableList} to {@code
   * stream.collect()}.
   *
   * @throws NullPointerException if any element is {@code null}
   * @since 14.0 (since 12.0 as {@code toImmutableList()}).
   */
  @SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
  public final ImmutableList<@NonNull E> toList() {
    // The cast is unchecked; copyOf rejects any null element at runtime (documented NPE).
    return ImmutableList.copyOf((Iterable<@NonNull E>) getDelegate());
  }
  /**
   * Returns an {@code ImmutableList} containing all of the elements from this {@code
   * FluentIterable} in the order specified by {@code comparator}. To produce an {@code
   * ImmutableList} sorted by its natural ordering, use {@code toSortedList(Ordering.natural())}.
   *
   * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableList#toImmutableList} to {@code
   * stream.sorted(comparator).collect()}.
   *
   * @param comparator the function by which to sort list elements
   * @throws NullPointerException if any element of this iterable is {@code null}
   * @since 14.0 (since 13.0 as {@code toSortedImmutableList()}).
   */
  @SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
  public final ImmutableList<@NonNull E> toSortedList(Comparator<? super E> comparator) {
    // The cast is unchecked; the sorted copy rejects null elements at runtime (documented NPE).
    return Ordering.from(comparator).immutableSortedCopy((Iterable<@NonNull E>) getDelegate());
  }
  /**
   * Returns an {@code ImmutableSet} containing all of the elements from this fluent iterable with
   * duplicates removed.
   *
   * <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableSet#toImmutableSet} to {@code
   * stream.collect()}.
   *
   * @throws NullPointerException if any element is {@code null}
   * @since 14.0 (since 12.0 as {@code toImmutableSet()}).
   */
  @SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
  public final ImmutableSet<@NonNull E> toSet() {
    // The cast is unchecked; copyOf rejects any null element at runtime (documented NPE).
    return ImmutableSet.copyOf((Iterable<@NonNull E>) getDelegate());
  }
/**
* Returns an {@code ImmutableSortedSet} containing all of the elements from this {@code
* FluentIterable} in the order specified by {@code comparator}, with duplicates (determined by
* {@code comparator.compare(x, y) == 0}) removed. To produce an {@code ImmutableSortedSet} sorted
* by its natural ordering, use {@code toSortedSet(Ordering.natural())}.
*
* <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableSortedSet#toImmutableSortedSet} to
* {@code stream.collect()}.
*
* @param comparator the function by which to sort set elements
* @throws NullPointerException if any element of this iterable is {@code null}
* @since 14.0 (since 12.0 as {@code toImmutableSortedSet()}).
*/
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableSortedSet<@NonNull E> toSortedSet(Comparator<? super E> comparator) {
return ImmutableSortedSet.copyOf(comparator, (Iterable<@NonNull E>) getDelegate());
}
/**
* Returns an {@code ImmutableMultiset} containing all of the elements from this fluent iterable.
*
* <p><b>{@code Stream} equivalent:</b> pass {@link ImmutableMultiset#toImmutableMultiset} to
* {@code stream.collect()}.
*
* @throws NullPointerException if any element is null
* @since 19.0
*/
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final ImmutableMultiset<@NonNull E> toMultiset() {
return ImmutableMultiset.copyOf((Iterable<@NonNull E>) getDelegate());
}
/**
* Returns an immutable map whose keys are the distinct elements of this {@code FluentIterable}
* and whose value for each key was computed by {@code valueFunction}. The map's iteration order
* is the order of the first appearance of each key in this iterable.
*
* <p>When there are multiple instances of a key in this iterable, it is unspecified whether
* {@code valueFunction} will be applied to more than one instance of that key and, if it is,
* which result will be mapped to that key in the returned map.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.collect(ImmutableMap.toImmutableMap(k -> k,
* valueFunction))}.
*
* @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
* valueFunction} produces {@code null} for any key
* @since 14.0
*/
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final <V> ImmutableMap<@NonNull E, V> toMap(Function<? super E, V> valueFunction) {
return Maps.toMap((Iterable<@NonNull E>) getDelegate(), valueFunction);
}
/**
* Creates an index {@code ImmutableListMultimap} that contains the results of applying a
* specified function to each item in this {@code FluentIterable} of values. Each element of this
* iterable will be stored as a value in the resulting multimap, yielding a multimap with the same
* size as this iterable. The key used to store that value in the multimap will be the result of
* calling the function on that value. The resulting multimap is created as an immutable snapshot.
* In the returned multimap, keys appear in the order they are first encountered, and the values
* corresponding to each key appear in the same order as they are encountered.
*
* <p><b>{@code Stream} equivalent:</b> {@code
* stream.collect(ImmutableListMultimap.toImmutableListMultimap(keyFunction, v -> v))}.
*
* @param keyFunction the function used to produce the key for each value
* @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
* keyFunction} produces {@code null} for any key
* @since 14.0
*/
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final <K> ImmutableListMultimap<K, @NonNull E> index(Function<? super E, K> keyFunction) {
return Multimaps.index((Iterable<@NonNull E>) getDelegate(), keyFunction);
}
/**
* Returns a map with the contents of this {@code FluentIterable} as its {@code values}, indexed
* by keys derived from those values. In other words, each input value produces an entry in the
* map whose key is the result of applying {@code keyFunction} to that value. These entries appear
* in the same order as they appeared in this fluent iterable. Example usage:
*
* {@snippet :
* Color red = new Color("red", 255, 0, 0);
* ...
* FluentIterable<Color> allColors = FluentIterable.from(ImmutableSet.of(red, green, blue));
*
* Map<String, Color> colorForName = allColors.uniqueIndex(toStringFunction());
* assertThat(colorForName).containsEntry("red", red);
* }
*
* <p>If your index may associate multiple values with each key, use {@link #index(Function)
* index}.
*
* <p><b>{@code Stream} equivalent:</b> {@code
* stream.collect(ImmutableMap.toImmutableMap(keyFunction, v -> v))}.
*
* @param keyFunction the function used to produce the key for each value
* @return a map mapping the result of evaluating the function {@code keyFunction} on each value
* in this fluent iterable to that value
* @throws IllegalArgumentException if {@code keyFunction} produces the same key for more than one
* value in this fluent iterable
* @throws NullPointerException if any element of this iterable is {@code null}, or if {@code
* keyFunction} produces {@code null} for any key
* @since 14.0
*/
@SuppressWarnings("nullness") // Unsafe, but we can't do much about it now.
public final <K> ImmutableMap<K, @NonNull E> uniqueIndex(Function<? super E, K> keyFunction) {
return Maps.uniqueIndex((Iterable<@NonNull E>) getDelegate(), keyFunction);
}
/**
* Returns an array containing all of the elements from this fluent iterable in iteration order.
*
* <p><b>{@code Stream} equivalent:</b> if an object array is acceptable, use {@code
* stream.toArray()}; if {@code type} is a class literal such as {@code MyType.class}, use {@code
* stream.toArray(MyType[]::new)}. Otherwise use {@code stream.toArray( len -> (E[])
* Array.newInstance(type, len))}.
*
* @param type the type of the elements
* @return a newly-allocated array into which all the elements of this fluent iterable have been
* copied
*/
@GwtIncompatible // Array.newArray(Class, int)
public final E[] toArray(Class<@NonNull E> type) {
return Iterables.<E>toArray(getDelegate(), type);
}
/**
* Copies all the elements from this fluent iterable to {@code collection}. This is equivalent to
* calling {@code Iterables.addAll(collection, this)}.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.forEachOrdered(collection::add)} or {@code
* stream.forEach(collection::add)}.
*
* @param collection the collection to copy elements to
* @return {@code collection}, for convenience
* @since 14.0
*/
@CanIgnoreReturnValue
public final <C extends Collection<? super E>> C copyInto(C collection) {
checkNotNull(collection);
Iterable<E> iterable = getDelegate();
if (iterable instanceof Collection) {
collection.addAll((Collection<E>) iterable);
} else {
for (E item : iterable) {
collection.add(item);
}
}
return collection;
}
/**
* Returns a {@link String} containing all of the elements of this fluent iterable joined with
* {@code joiner}.
*
* <p><b>{@code Stream} equivalent:</b> {@code joiner.join(stream.iterator())}, or, if you are not
* using any optional {@code Joiner} features, {@code
* stream.collect(Collectors.joining(delimiter)}.
*
* @since 18.0
*/
public final String join(Joiner joiner) {
return joiner.join(this);
}
/**
* Returns the element at the specified position in this fluent iterable.
*
* <p><b>{@code Stream} equivalent:</b> {@code stream.skip(position).findFirst().get()} (but note
* that this throws different exception types, and throws an exception if {@code null} would be
* returned).
*
* @param position position of the element to return
* @return the element at the specified position in this fluent iterable
* @throws IndexOutOfBoundsException if {@code position} is negative or greater than or equal to
* the size of this fluent iterable
*/
@ParametricNullness
public final E get(int position) {
return Iterables.get(getDelegate(), position);
}
}
// NOTE(review): non-Java text ("Subsets and Splits" / dataset-viewer boilerplate) was appended
// after the class's closing brace, apparently by an extraction tool; replaced with this comment
// so the file remains valid Java.