repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
apache/kafka
38,108
metadata/src/main/java/org/apache/kafka/controller/ClusterControlManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.controller; import org.apache.kafka.common.DirectoryId; import org.apache.kafka.common.Uuid; import org.apache.kafka.common.errors.BrokerIdNotRegisteredException; import org.apache.kafka.common.errors.DuplicateBrokerRegistrationException; import org.apache.kafka.common.errors.InconsistentClusterIdException; import org.apache.kafka.common.errors.InvalidRegistrationException; import org.apache.kafka.common.errors.StaleBrokerEpochException; import org.apache.kafka.common.errors.UnsupportedVersionException; import org.apache.kafka.common.message.BrokerRegistrationRequestData; import org.apache.kafka.common.message.ControllerRegistrationRequestData; import org.apache.kafka.common.metadata.BrokerRegistrationChangeRecord; import org.apache.kafka.common.metadata.FenceBrokerRecord; import org.apache.kafka.common.metadata.RegisterBrokerRecord; import org.apache.kafka.common.metadata.RegisterBrokerRecord.BrokerFeature; import org.apache.kafka.common.metadata.RegisterControllerRecord; import org.apache.kafka.common.metadata.RegisterControllerRecord.ControllerFeatureCollection; import org.apache.kafka.common.metadata.UnfenceBrokerRecord; import 
org.apache.kafka.common.metadata.UnregisterBrokerRecord; import org.apache.kafka.common.protocol.ApiMessage; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.Time; import org.apache.kafka.controller.metrics.QuorumControllerMetrics; import org.apache.kafka.metadata.BrokerRegistration; import org.apache.kafka.metadata.BrokerRegistrationFencingChange; import org.apache.kafka.metadata.BrokerRegistrationInControlledShutdownChange; import org.apache.kafka.metadata.BrokerRegistrationReply; import org.apache.kafka.metadata.ControllerRegistration; import org.apache.kafka.metadata.FinalizedControllerFeatures; import org.apache.kafka.metadata.ListenerInfo; import org.apache.kafka.metadata.VersionRange; import org.apache.kafka.metadata.placement.ReplicaPlacer; import org.apache.kafka.metadata.placement.StripedReplicaPlacer; import org.apache.kafka.metadata.placement.UsableBroker; import org.apache.kafka.server.common.ApiMessageAndVersion; import org.apache.kafka.server.common.MetadataVersion; import org.apache.kafka.timeline.SnapshotRegistry; import org.apache.kafka.timeline.TimelineHashMap; import org.slf4j.Logger; import java.util.AbstractMap; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.OptionalLong; import java.util.Random; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import static java.util.concurrent.TimeUnit.NANOSECONDS; /** * The ClusterControlManager manages all the hard state associated with the Kafka cluster. * Hard state is state which appears in the metadata log, such as broker registrations, * brokers being fenced or unfenced, and broker feature versions. 
*/ public class ClusterControlManager { static final long DEFAULT_SESSION_TIMEOUT_NS = NANOSECONDS.convert(9, TimeUnit.SECONDS); static class Builder { private LogContext logContext = null; private String clusterId = null; private Time time = Time.SYSTEM; private SnapshotRegistry snapshotRegistry = null; private long sessionTimeoutNs = DEFAULT_SESSION_TIMEOUT_NS; private ReplicaPlacer replicaPlacer = null; private FeatureControlManager featureControl = null; private BrokerShutdownHandler brokerShutdownHandler = null; private QuorumControllerMetrics metrics = null; Builder setLogContext(LogContext logContext) { this.logContext = logContext; return this; } Builder setClusterId(String clusterId) { this.clusterId = clusterId; return this; } Builder setTime(Time time) { this.time = time; return this; } Builder setSnapshotRegistry(SnapshotRegistry snapshotRegistry) { this.snapshotRegistry = snapshotRegistry; return this; } Builder setSessionTimeoutNs(long sessionTimeoutNs) { this.sessionTimeoutNs = sessionTimeoutNs; return this; } Builder setReplicaPlacer(ReplicaPlacer replicaPlacer) { this.replicaPlacer = replicaPlacer; return this; } Builder setFeatureControlManager(FeatureControlManager featureControl) { this.featureControl = featureControl; return this; } Builder setBrokerShutdownHandler(BrokerShutdownHandler brokerShutdownHandler) { this.brokerShutdownHandler = brokerShutdownHandler; return this; } Builder setMetrics(QuorumControllerMetrics metrics) { this.metrics = metrics; return this; } ClusterControlManager build() { if (logContext == null) { logContext = new LogContext(); } if (clusterId == null) { clusterId = Uuid.randomUuid().toString(); } if (snapshotRegistry == null) { snapshotRegistry = new SnapshotRegistry(logContext); } if (replicaPlacer == null) { replicaPlacer = new StripedReplicaPlacer(new Random()); } if (featureControl == null) { throw new RuntimeException("You must specify FeatureControlManager"); } if (brokerShutdownHandler == null) { throw new 
RuntimeException("You must specify BrokerShutdownHandler"); } if (metrics == null) { metrics = new QuorumControllerMetrics(Optional.empty(), time, 0); } return new ClusterControlManager(logContext, clusterId, time, snapshotRegistry, sessionTimeoutNs, replicaPlacer, featureControl, brokerShutdownHandler, metrics ); } } class ReadyBrokersFuture { private final CompletableFuture<Void> future; private final int minBrokers; ReadyBrokersFuture(CompletableFuture<Void> future, int minBrokers) { this.future = future; this.minBrokers = minBrokers; } boolean check() { int numUnfenced = 0; for (BrokerRegistration registration : brokerRegistrations.values()) { if (!registration.fenced()) { numUnfenced++; } if (numUnfenced >= minBrokers) { return true; } } return false; } } /** * The SLF4J log context. */ private final LogContext logContext; /** * The ID of this cluster. */ private final String clusterId; /** * The SLF4J log object. */ private final Logger log; /** * The Kafka clock object to use. */ private final Time time; /** * How long sessions should last, in nanoseconds. */ private final long sessionTimeoutNs; /** * The replica placer to use. */ private final ReplicaPlacer replicaPlacer; /** * Maps broker IDs to broker registrations. */ private final TimelineHashMap<Integer, BrokerRegistration> brokerRegistrations; /** * Save the offset of each broker registration record, we will only unfence a * broker when its high watermark has reached its broker registration record, * this is not necessarily the exact offset of each broker registration record * but should not be smaller than it. */ private final TimelineHashMap<Integer, Long> registerBrokerRecordOffsets; /** * The broker heartbeat manager, or null if this controller is on standby. */ private volatile BrokerHeartbeatManager heartbeatManager; /** * A future which is completed as soon as we have the given number of brokers * ready. 
*/ private Optional<ReadyBrokersFuture> readyBrokersFuture; /** * The feature control manager. */ private final FeatureControlManager featureControl; private final BrokerShutdownHandler brokerShutdownHandler; /** * Maps controller IDs to controller registrations. */ private final TimelineHashMap<Integer, ControllerRegistration> controllerRegistrations; /** * Maps directories to their respective brokers. */ private final TimelineHashMap<Uuid, Integer> directoryToBroker; /** * Manages the kafka.controller:type=KafkaController,name=TimeSinceLastHeartbeatReceivedMs,broker=<brokerId> metrics. */ private final QuorumControllerMetrics metrics; private ClusterControlManager( LogContext logContext, String clusterId, Time time, SnapshotRegistry snapshotRegistry, long sessionTimeoutNs, ReplicaPlacer replicaPlacer, FeatureControlManager featureControl, BrokerShutdownHandler brokerShutdownHandler, QuorumControllerMetrics metrics ) { this.logContext = logContext; this.clusterId = clusterId; this.log = logContext.logger(ClusterControlManager.class); this.time = time; this.sessionTimeoutNs = sessionTimeoutNs; this.replicaPlacer = replicaPlacer; this.brokerRegistrations = new TimelineHashMap<>(snapshotRegistry, 0); this.registerBrokerRecordOffsets = new TimelineHashMap<>(snapshotRegistry, 0); this.heartbeatManager = null; this.readyBrokersFuture = Optional.empty(); this.featureControl = featureControl; this.controllerRegistrations = new TimelineHashMap<>(snapshotRegistry, 0); this.directoryToBroker = new TimelineHashMap<>(snapshotRegistry, 0); this.brokerShutdownHandler = brokerShutdownHandler; this.metrics = metrics; } ReplicaPlacer replicaPlacer() { return replicaPlacer; } /** * Transition this ClusterControlManager to active. 
*/ public void activate() { heartbeatManager = new BrokerHeartbeatManager(logContext, time, sessionTimeoutNs); long nowNs = time.nanoseconds(); for (BrokerRegistration registration : brokerRegistrations.values()) { int brokerId = registration.id(); heartbeatManager.register(brokerId, registration.fenced()); metrics.addTimeSinceLastHeartbeatMetric(brokerId); if (!registration.fenced()) { heartbeatManager.tracker().updateContactTime( new BrokerIdAndEpoch(brokerId, registration.epoch()), nowNs); } } } String clusterId() { // Visible for testing return clusterId; } /** * Transition this ClusterControlManager to standby. */ public void deactivate() { heartbeatManager = null; metrics.removeTimeSinceLastHeartbeatMetrics(); } Map<Integer, BrokerRegistration> brokerRegistrations() { return brokerRegistrations; } /** * Process an incoming broker registration request. */ public ControllerResult<BrokerRegistrationReply> registerBroker( BrokerRegistrationRequestData request, long newBrokerEpoch, FinalizedControllerFeatures finalizedFeatures, boolean cleanShutdownDetectionEnabled ) { if (heartbeatManager == null) { throw new RuntimeException("ClusterControlManager is not active."); } if (!clusterId.equals(request.clusterId())) { throw new InconsistentClusterIdException("Expected cluster ID " + clusterId + ", but got cluster ID " + request.clusterId()); } int brokerId = request.brokerId(); List<ApiMessageAndVersion> records = new ArrayList<>(); BrokerRegistration existing = brokerRegistrations.get(brokerId); Uuid prevIncarnationId = null; long storedBrokerEpoch = -2; // BrokerRegistration.previousBrokerEpoch default value is -1 if (existing != null) { prevIncarnationId = existing.incarnationId(); storedBrokerEpoch = existing.epoch(); if (heartbeatManager.hasValidSession(brokerId, existing.epoch())) { if (!request.incarnationId().equals(prevIncarnationId)) { throw new DuplicateBrokerRegistrationException("Another broker is registered with that broker id. 
If the broker " + "was recently restarted this should self-resolve once the heartbeat manager expires the broker's session."); } } } if (request.isMigratingZkBroker()) { throw new BrokerIdNotRegisteredException("Controller does not support registering ZK brokers."); } if (featureControl.metadataVersionOrThrow().isDirectoryAssignmentSupported()) { if (request.logDirs().isEmpty()) { throw new InvalidRegistrationException("No directories specified in request"); } if (request.logDirs().stream().anyMatch(DirectoryId::reserved)) { throw new InvalidRegistrationException("Reserved directory ID in request"); } Set<Uuid> set = new HashSet<>(request.logDirs()); if (set.size() != request.logDirs().size()) { throw new InvalidRegistrationException("Duplicate directory ID in request"); } for (Uuid directory : request.logDirs()) { Integer dirBrokerId = directoryToBroker.get(directory); if (dirBrokerId != null && dirBrokerId != brokerId) { throw new InvalidRegistrationException("Broker " + dirBrokerId + " is already registered with directory " + directory); } } } ListenerInfo listenerInfo = ListenerInfo.fromBrokerRegistrationRequest(request.listeners()); RegisterBrokerRecord record = new RegisterBrokerRecord(). setBrokerId(brokerId). setIsMigratingZkBroker(request.isMigratingZkBroker()). setIncarnationId(request.incarnationId()). setRack(request.rack()). setEndPoints(listenerInfo.toBrokerRegistrationRecord()); // Track which finalized features we have not yet verified are supported by the broker. Map<String, Short> unverifiedFeatures = new HashMap<>(finalizedFeatures.featureMap()); // Check every broker feature version range includes the finalized version. 
for (BrokerRegistrationRequestData.Feature feature : request.features()) { record.features().add(processRegistrationFeature(brokerId, finalizedFeatures, feature)); unverifiedFeatures.remove(feature.name()); } if (request.features().find(MetadataVersion.FEATURE_NAME) == null) throw new InvalidRegistrationException("Request features do not contain '" + MetadataVersion.FEATURE_NAME + "'"); // We also need to check every controller feature is supported by the broker. unverifiedFeatures.forEach((featureName, finalizedVersion) -> { if (finalizedVersion != 0 && request.features().findAll(featureName).isEmpty()) { processRegistrationFeature(brokerId, finalizedFeatures, new BrokerRegistrationRequestData.Feature(). setName(featureName). setMinSupportedVersion((short) 0). setMaxSupportedVersion((short) 0)); } }); if (featureControl.metadataVersionOrThrow().isDirectoryAssignmentSupported()) { record.setLogDirs(request.logDirs()); } if (!request.incarnationId().equals(prevIncarnationId)) { int prevNumRecords = records.size(); boolean isCleanShutdown = cleanShutdownDetectionEnabled ? storedBrokerEpoch == request.previousBrokerEpoch() : false; brokerShutdownHandler.addRecordsForShutdown(request.brokerId(), isCleanShutdown, records); int numRecordsAdded = records.size() - prevNumRecords; if (existing == null) { log.info("No previous registration found for broker {}. New incarnation ID is " + "{}. Generated {} record(s) to clean up previous incarnations. New broker " + "epoch is {}.", brokerId, request.incarnationId(), numRecordsAdded, newBrokerEpoch); metrics.addTimeSinceLastHeartbeatMetric(brokerId); } else { log.info("Registering a new incarnation of broker {}. Previous incarnation ID " + "was {}; new incarnation ID is {}. Generated {} record(s) to clean up " + "previous incarnations. 
Broker epoch will become {}.", brokerId, existing.incarnationId(), request.incarnationId(), numRecordsAdded, newBrokerEpoch); } record.setBrokerEpoch(newBrokerEpoch); } else { log.info("Amending registration of broker {}, incarnation ID {}. Broker epoch remains {}.", request.brokerId(), request.incarnationId(), existing.epoch()); record.setFenced(existing.fenced()); record.setInControlledShutdown(existing.inControlledShutdown()); record.setBrokerEpoch(existing.epoch()); } records.add(new ApiMessageAndVersion(record, featureControl.metadataVersionOrThrow(). registerBrokerRecordVersion())); if (!request.incarnationId().equals(prevIncarnationId)) { // Remove any existing session for the old broker incarnation. heartbeatManager.remove(brokerId); } heartbeatManager.register(brokerId, record.fenced()); // A broker registration that cleans up a previous incarnation's unclean shutdown may generate a large number of records. // It is safe to return these records as a non-atomic batch as long as the registration record is added last. // This ensures that in case of a controller failure, the broker will re-register and the new controller // can retry the unclean shutdown cleanup. return ControllerResult.of(records, new BrokerRegistrationReply(record.brokerEpoch())); } ControllerResult<Void> registerController(ControllerRegistrationRequestData request) { if (!featureControl.metadataVersionOrThrow().isControllerRegistrationSupported()) { throw new UnsupportedVersionException("The current MetadataVersion is too old to " + "support controller registrations."); } ListenerInfo listenerInfo = ListenerInfo.fromControllerRegistrationRequest(request.listeners()); ControllerFeatureCollection features = new ControllerFeatureCollection(); request.features().forEach(feature -> features.add(new RegisterControllerRecord.ControllerFeature(). setName(feature.name()). setMaxSupportedVersion(feature.maxSupportedVersion()). 
setMinSupportedVersion(feature.minSupportedVersion())) ); List<ApiMessageAndVersion> records = new ArrayList<>(); records.add(new ApiMessageAndVersion(new RegisterControllerRecord(). setControllerId(request.controllerId()). setIncarnationId(request.incarnationId()). setZkMigrationReady(false). setEndPoints(listenerInfo.toControllerRegistrationRecord()). setFeatures(features), (short) 0)); return ControllerResult.atomicOf(records, null); } BrokerFeature processRegistrationFeature( int brokerId, FinalizedControllerFeatures finalizedFeatures, BrokerRegistrationRequestData.Feature feature ) { // MetadataVersion has no default while the other features default to `0` short finalized; if (feature.name().equals(MetadataVersion.FEATURE_NAME)) finalized = finalizedFeatures.get(feature.name()).orElseThrow(() -> new IllegalArgumentException("Feature with name '" + MetadataVersion.FEATURE_NAME + "' not found in finalizedFeatures " + finalizedFeatures)); else finalized = finalizedFeatures.versionOrDefault(feature.name(), (short) 0); if (!VersionRange.of(feature.minSupportedVersion(), feature.maxSupportedVersion()).contains(finalized)) { throw new UnsupportedVersionException("Unable to register because the broker " + "does not support finalized version " + finalized + " of " + feature.name() + ". The broker wants a version between " + feature.minSupportedVersion() + " and " + feature.maxSupportedVersion() + ", inclusive."); } // A feature is not found in the finalizedFeature map if it is unknown to the controller or set to 0 (feature not enabled). if (!finalizedFeatures.featureNames().contains(feature.name())) log.debug("Broker {} registered with version range ({}, {}] of feature {} which controller does not know " + "or has finalized version of 0.", brokerId, feature.minSupportedVersion(), feature.maxSupportedVersion(), feature.name()); return new BrokerFeature(). setName(feature.name()). setMinSupportedVersion(feature.minSupportedVersion()). 
setMaxSupportedVersion(feature.maxSupportedVersion()); } /** * Track an incoming broker heartbeat. Unlike most functions, this one is not called from the main * controller thread, so it can only access local, volatile and atomic data. * * @param brokerId The broker id to track. * @param brokerEpoch The broker epoch to track. * * @return True only if the ClusterControlManager is active. */ boolean trackBrokerHeartbeat(int brokerId, long brokerEpoch) { BrokerHeartbeatManager manager = heartbeatManager; if (manager == null) return false; manager.tracker().updateContactTime(new BrokerIdAndEpoch(brokerId, brokerEpoch)); return true; } public OptionalLong registerBrokerRecordOffset(int brokerId) { Long registrationOffset = registerBrokerRecordOffsets.get(brokerId); if (registrationOffset != null) { return OptionalLong.of(registrationOffset); } return OptionalLong.empty(); } public void replay(RegisterBrokerRecord record, long offset) { registerBrokerRecordOffsets.put(record.brokerId(), offset); int brokerId = record.brokerId(); ListenerInfo listenerInfo = ListenerInfo.fromBrokerRegistrationRecord(record.endPoints()); Map<String, VersionRange> features = new HashMap<>(); for (BrokerFeature feature : record.features()) { features.put(feature.name(), VersionRange.of( feature.minSupportedVersion(), feature.maxSupportedVersion())); } // Update broker registrations. BrokerRegistration prevRegistration = brokerRegistrations.put(brokerId, new BrokerRegistration.Builder(). setId(brokerId). setEpoch(record.brokerEpoch()). setIncarnationId(record.incarnationId()). setListeners(listenerInfo.listeners()). setSupportedFeatures(features). setRack(Optional.ofNullable(record.rack())). setFenced(record.fenced()). setInControlledShutdown(record.inControlledShutdown()). setIsMigratingZkBroker(record.isMigratingZkBroker()). setDirectories(record.logDirs()). build()); updateDirectories(brokerId, prevRegistration == null ? 
null : prevRegistration.directories(), record.logDirs()); if (heartbeatManager != null) { if (prevRegistration != null) heartbeatManager.remove(brokerId); heartbeatManager.register(brokerId, record.fenced()); } if (prevRegistration == null) { log.info("Replayed initial RegisterBrokerRecord for broker {}: {}", record.brokerId(), record); } else if (prevRegistration.incarnationId().equals(record.incarnationId())) { log.info("Replayed RegisterBrokerRecord modifying the registration for broker {}: {}", record.brokerId(), record); } else { log.info("Replayed RegisterBrokerRecord establishing a new incarnation of broker {}: {}", record.brokerId(), record); } } public void replay(UnregisterBrokerRecord record) { registerBrokerRecordOffsets.remove(record.brokerId()); int brokerId = record.brokerId(); BrokerRegistration registration = brokerRegistrations.get(brokerId); if (registration == null) { throw new RuntimeException(String.format("Unable to replay %s: no broker " + "registration found for that id", record)); } else if (registration.epoch() != record.brokerEpoch()) { throw new RuntimeException(String.format("Unable to replay %s: no broker " + "registration with that epoch found", record)); } else { if (heartbeatManager != null) heartbeatManager.remove(brokerId); updateDirectories(brokerId, registration.directories(), null); brokerRegistrations.remove(brokerId); log.info("Replayed {}", record); } } public void replay(FenceBrokerRecord record) { replayRegistrationChange( record, record.id(), record.epoch(), BrokerRegistrationFencingChange.FENCE.asBoolean(), BrokerRegistrationInControlledShutdownChange.NONE.asBoolean(), Optional.empty() ); } public void replay(UnfenceBrokerRecord record) { replayRegistrationChange( record, record.id(), record.epoch(), BrokerRegistrationFencingChange.UNFENCE.asBoolean(), BrokerRegistrationInControlledShutdownChange.NONE.asBoolean(), Optional.empty() ); } public void replay(BrokerRegistrationChangeRecord record) { 
BrokerRegistrationFencingChange fencingChange = BrokerRegistrationFencingChange.fromValue(record.fenced()).orElseThrow( () -> new IllegalStateException(String.format("Unable to replay %s: unknown " + "value for fenced field: %x", record, record.fenced()))); BrokerRegistrationInControlledShutdownChange inControlledShutdownChange = BrokerRegistrationInControlledShutdownChange.fromValue(record.inControlledShutdown()).orElseThrow( () -> new IllegalStateException(String.format("Unable to replay %s: unknown " + "value for inControlledShutdown field: %x", record, record.inControlledShutdown()))); Optional<List<Uuid>> directoriesChange = Optional.ofNullable(record.logDirs()).filter(list -> !list.isEmpty()); replayRegistrationChange( record, record.brokerId(), record.brokerEpoch(), fencingChange.asBoolean(), inControlledShutdownChange.asBoolean(), directoriesChange ); } private void replayRegistrationChange( ApiMessage record, int brokerId, long brokerEpoch, Optional<Boolean> fencingChange, Optional<Boolean> inControlledShutdownChange, Optional<List<Uuid>> directoriesChange ) { BrokerRegistration curRegistration = brokerRegistrations.get(brokerId); if (curRegistration == null) { throw new RuntimeException(String.format("Unable to replay %s: no broker " + "registration found for that id", record.toString())); } else if (curRegistration.epoch() != brokerEpoch) { throw new RuntimeException(String.format("Unable to replay %s: no broker " + "registration with that epoch found", record.toString())); } else { BrokerRegistration nextRegistration = curRegistration.cloneWith( fencingChange, inControlledShutdownChange, directoriesChange ); if (!curRegistration.equals(nextRegistration)) { log.info("Replayed {} modifying the registration for broker {}: {}", record.getClass().getSimpleName(), brokerId, record); brokerRegistrations.put(brokerId, nextRegistration); } else { log.info("Ignoring no-op registration change for {}", curRegistration); } updateDirectories(brokerId, 
curRegistration.directories(), nextRegistration.directories()); if (heartbeatManager != null) heartbeatManager.register(brokerId, nextRegistration.fenced()); if (readyBrokersFuture.isPresent()) { if (readyBrokersFuture.get().check()) { readyBrokersFuture.get().future.complete(null); readyBrokersFuture = Optional.empty(); } } } } public void replay(RegisterControllerRecord record) { ControllerRegistration newRegistration = new ControllerRegistration.Builder(record).build(); ControllerRegistration prevRegistration = controllerRegistrations.put(record.controllerId(), newRegistration); log.info("Replayed RegisterControllerRecord containing {}.{}", newRegistration, prevRegistration == null ? "" : " Previous incarnation was " + prevRegistration.incarnationId()); } Iterator<UsableBroker> usableBrokers() { if (heartbeatManager == null) { throw new RuntimeException("ClusterControlManager is not active."); } return heartbeatManager.usableBrokers( id -> brokerRegistrations.get(id).rack()); } /** * Returns true if the broker is unfenced; Returns false if it is * not or if it does not exist. */ public boolean isUnfenced(int brokerId) { BrokerRegistration registration = brokerRegistrations.get(brokerId); if (registration == null) return false; return !registration.fenced(); } /** * Get a broker registration if it exists. * * @param brokerId The brokerId to get the registration for * @return The current registration or null if the broker is not registered */ public BrokerRegistration registration(int brokerId) { return brokerRegistrations.get(brokerId); } /** * Returns true if the broker is in controlled shutdown state; Returns false * if it is not or if it does not exist. */ public boolean inControlledShutdown(int brokerId) { BrokerRegistration registration = brokerRegistrations.get(brokerId); if (registration == null) return false; return registration.inControlledShutdown(); } /** * Returns true if the broker is active. 
Active means not fenced nor in controlled * shutdown; Returns false if it is not active or if it does not exist. */ public boolean isActive(int brokerId) { BrokerRegistration registration = brokerRegistrations.get(brokerId); if (registration == null) return false; return !registration.inControlledShutdown() && !registration.fenced(); } BrokerHeartbeatManager heartbeatManager() { // We throw RuntimeException here rather than NotControllerException because all the callers // have already verified that we are active. For example, ControllerWriteEvent.run verifies // that we are the current active controller before running any event-specific code. if (heartbeatManager == null) { throw new RuntimeException("ClusterControlManager is not active."); } return heartbeatManager; } public void checkBrokerEpoch(int brokerId, long brokerEpoch) { BrokerRegistration registration = brokerRegistrations.get(brokerId); if (registration == null) { throw new StaleBrokerEpochException("No broker registration found for " + "broker id " + brokerId); } if (registration.epoch() != brokerEpoch) { throw new StaleBrokerEpochException("Expected broker epoch " + registration.epoch() + ", but got broker epoch " + brokerEpoch); } } public void addReadyBrokersFuture(CompletableFuture<Void> future, int minBrokers) { readyBrokersFuture = Optional.of(new ReadyBrokersFuture(future, minBrokers)); if (readyBrokersFuture.get().check()) { readyBrokersFuture.get().future.complete(null); readyBrokersFuture = Optional.empty(); } } /** * Determines whether the specified directory is online in the specified broker. * Returns false whether the directory is not online or the broker is not registered. */ public boolean hasOnlineDir(int brokerId, Uuid directoryId) { BrokerRegistration registration = registration(brokerId); return registration != null && registration.hasOnlineDir(directoryId); } /** * Determines the default directory for new partitions placed on a given broker. 
*/ public Uuid defaultDir(int brokerId) { BrokerRegistration registration = registration(brokerId); if (registration == null) { // throwing an exception here can break the expected error from an // Admin call, so instead, we return UNASSIGNED, and let the fact // that the broker is not registered be handled elsewhere. return DirectoryId.UNASSIGNED; } List<Uuid> directories = registration.directories(); if (directories.isEmpty()) { return DirectoryId.MIGRATING; } if (directories.size() == 1) { return directories.get(0); } return DirectoryId.UNASSIGNED; } void updateDirectories(int brokerId, List<Uuid> dirsToRemove, List<Uuid> dirsToAdd) { if (dirsToRemove != null) { for (Uuid directory : dirsToRemove) { if (!directoryToBroker.remove(directory, brokerId)) { throw new IllegalStateException("BUG: directory " + directory + " not assigned to broker " + brokerId); } } } if (dirsToAdd != null) { for (Uuid directory : dirsToAdd) { Integer existingId = directoryToBroker.putIfAbsent(directory, brokerId); if (existingId != null && existingId != brokerId) { throw new IllegalStateException("BUG: directory " + directory + " already assigned to broker " + existingId); } } } } Iterator<Entry<Integer, Map<String, VersionRange>>> brokerSupportedFeatures() { return new Iterator<>() { private final Iterator<BrokerRegistration> iter = brokerRegistrations.values().iterator(); @Override public boolean hasNext() { return iter.hasNext(); } @Override public Entry<Integer, Map<String, VersionRange>> next() { BrokerRegistration registration = iter.next(); return new AbstractMap.SimpleImmutableEntry<>(registration.id(), registration.supportedFeatures()); } }; } Iterator<Entry<Integer, Map<String, VersionRange>>> controllerSupportedFeatures() { if (!featureControl.metadataVersionOrThrow().isControllerRegistrationSupported()) { throw new UnsupportedVersionException("The current MetadataVersion is too old to " + "support controller registrations."); } return new Iterator<>() { private final 
Iterator<ControllerRegistration> iter = controllerRegistrations.values().iterator(); @Override public boolean hasNext() { return iter.hasNext(); } @Override public Entry<Integer, Map<String, VersionRange>> next() { ControllerRegistration registration = iter.next(); return new AbstractMap.SimpleImmutableEntry<>(registration.id(), registration.supportedFeatures()); } }; } @FunctionalInterface interface BrokerShutdownHandler { void addRecordsForShutdown(int brokerId, boolean isCleanShutdown, List<ApiMessageAndVersion> records); } }
apache/tez
38,050
tez-tests/src/test/java/org/apache/tez/mapreduce/TestMRRJobsDAGApi.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.mapreduce; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.net.URI; import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import javax.tools.JavaCompiler; import javax.tools.JavaFileObject; import javax.tools.JavaFileObject.Kind; import javax.tools.SimpleJavaFileObject; import javax.tools.StandardJavaFileManager; import javax.tools.StandardLocation; import javax.tools.ToolProvider; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.mapred.JobConf; import 
org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.tez.client.TezAppMasterStatus; import org.apache.tez.client.TezClient; import org.apache.tez.client.TezClientUtils; import org.apache.tez.common.Preconditions; import org.apache.tez.common.ReflectionUtils; import org.apache.tez.common.TezClassLoader; import org.apache.tez.common.TezUtils; import org.apache.tez.common.counters.FileSystemCounter; import org.apache.tez.common.counters.TaskCounter; import org.apache.tez.dag.api.DAG; import org.apache.tez.dag.api.DataSinkDescriptor; import org.apache.tez.dag.api.DataSourceDescriptor; import org.apache.tez.dag.api.Edge; import org.apache.tez.dag.api.EdgeProperty; import org.apache.tez.dag.api.EdgeProperty.DataMovementType; import org.apache.tez.dag.api.EdgeProperty.DataSourceType; import org.apache.tez.dag.api.EdgeProperty.SchedulingType; import org.apache.tez.dag.api.InputDescriptor; import org.apache.tez.dag.api.InputInitializerDescriptor; import org.apache.tez.dag.api.OutputDescriptor; import org.apache.tez.dag.api.ProcessorDescriptor; import org.apache.tez.dag.api.TezConfiguration; import org.apache.tez.dag.api.TezException; import org.apache.tez.dag.api.TezReflectionException; import org.apache.tez.dag.api.TezUncheckedException; import 
org.apache.tez.dag.api.UserPayload; import org.apache.tez.dag.api.Vertex; import org.apache.tez.dag.api.client.DAGClient; import org.apache.tez.dag.api.client.DAGStatus; import org.apache.tez.dag.api.client.DAGStatus.State; import org.apache.tez.dag.api.client.StatusGetOpts; import org.apache.tez.dag.history.logging.impl.SimpleHistoryLoggingService; import org.apache.tez.mapreduce.common.MRInputAMSplitGenerator; import org.apache.tez.mapreduce.examples.BroadcastAndOneToOneExample; import org.apache.tez.mapreduce.examples.ExampleDriver; import org.apache.tez.mapreduce.examples.MRRSleepJob; import org.apache.tez.mapreduce.examples.MRRSleepJob.ISleepReducer; import org.apache.tez.mapreduce.examples.MRRSleepJob.MRRSleepJobPartitioner; import org.apache.tez.mapreduce.examples.MRRSleepJob.SleepInputFormat; import org.apache.tez.mapreduce.examples.MRRSleepJob.SleepMapper; import org.apache.tez.mapreduce.examples.MRRSleepJob.SleepReducer; import org.apache.tez.mapreduce.examples.UnionExample; import org.apache.tez.mapreduce.hadoop.MRHelpers; import org.apache.tez.mapreduce.hadoop.MRInputHelpers; import org.apache.tez.mapreduce.hadoop.MRJobConfig; import org.apache.tez.mapreduce.input.MRInputLegacy; import org.apache.tez.mapreduce.output.MROutputLegacy; import org.apache.tez.mapreduce.processor.map.MapProcessor; import org.apache.tez.mapreduce.processor.reduce.ReduceProcessor; import org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRInputUserPayloadProto; import org.apache.tez.runtime.api.Event; import org.apache.tez.runtime.api.InputInitializer; import org.apache.tez.runtime.api.InputInitializerContext; import org.apache.tez.runtime.library.api.TezRuntimeConfiguration; import org.apache.tez.runtime.library.input.OrderedGroupedInputLegacy; import org.apache.tez.runtime.library.output.OrderedPartitionedKVOutput; import org.apache.tez.runtime.library.processor.SleepProcessor; import org.apache.tez.runtime.library.processor.SleepProcessor.SleepProcessorConfig; import 
org.apache.tez.test.MiniTezCluster;

import com.google.common.collect.Sets;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * End-to-end tests for submitting multi-stage (MRR) sleep jobs through the Tez DAG API,
 * running against an in-process {@link MiniDFSCluster} plus {@link MiniTezCluster}.
 *
 * <p>Covers both submission modes (non-session AM launch and DAG-via-RPC to a running
 * session AM), DAG kill, session shutdown, AM-side split generation, history logging,
 * and AM re-localization of additional local resources between DAGs in one session.
 *
 * <p>The clusters are shared across all tests in this class (started once in
 * {@link #setup()}, stopped in {@link #tearDown()}).
 */
public class TestMRRJobsDAGApi {

  private static final Logger LOG = LoggerFactory.getLogger(TestMRRJobsDAGApi.class);

  // Shared mini clusters, created once per test class run.
  protected static MiniTezCluster mrrTezCluster;
  protected static MiniDFSCluster dfsCluster;

  private static Configuration conf = new Configuration();
  // FileSystem of the mini DFS cluster; used as the default FS for the Tez cluster.
  private static FileSystem remoteFs;

  // Used to randomize staging dir and DAG names so reruns don't collide.
  private Random random = new Random();

  private static String TEST_ROOT_DIR = "target" + Path.SEPARATOR
      + TestMRRJobsDAGApi.class.getName() + "-tmpDir";

  /**
   * Starts a 2-datanode mini DFS cluster, then a 1-node mini Tez cluster whose
   * default FS points at it.
   *
   * @throws IOException if the Tez cluster fails to initialize
   */
  @BeforeClass
  public static void setup() throws IOException {
    try {
      conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEST_ROOT_DIR);
      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
          .format(true).racks(null).build();
      remoteFs = dfsCluster.getFileSystem();
    } catch (IOException io) {
      throw new RuntimeException("problem starting mini dfs cluster", io);
    }

    if (mrrTezCluster == null) {
      mrrTezCluster = new MiniTezCluster(TestMRRJobsDAGApi.class.getName(), 1,
          1, 1);
      // Fresh local Configuration; deliberately shadows the static 'conf' used for DFS.
      Configuration conf = new Configuration();
      conf.set("fs.defaultFS", remoteFs.getUri().toString()); // use HDFS
      // Keep NM-localized files around long enough to debug failed runs.
      conf.setInt("yarn.nodemanager.delete.debug-delay-sec", 20000);
      conf.setLong(TezConfiguration.TEZ_AM_SLEEP_TIME_BEFORE_EXIT_MILLIS, 500);
      mrrTezCluster.init(conf);
      mrrTezCluster.start();
    }
  }

  /** Stops the Tez cluster first, then the DFS cluster, nulling both for safety. */
  @AfterClass
  public static void tearDown() {
    if (mrrTezCluster != null) {
      mrrTezCluster.stop();
      mrrTezCluster = null;
    }
    if (dfsCluster != null) {
      dfsCluster.shutdown();
      dfsCluster = null;
    }
    // TODO Add cleanup code.
  }

  /**
   * Runs a single-vertex SleepProcessor DAG in non-session mode and verifies:
   * memory-usage fields are 0 when not requested, and populated (along with DAG
   * counters) when GET_MEMORY_USAGE / GET_COUNTERS are requested.
   */
  @Test(timeout = 60000)
  public void testSleepJob() throws TezException, IOException,
      InterruptedException {
    SleepProcessorConfig spConf = new SleepProcessorConfig(1);

    DAG dag = DAG.create("DAG-testSleepJob");
    Vertex vertex = Vertex.create("SleepVertex", ProcessorDescriptor.create(
        SleepProcessor.class.getName()).setUserPayload(spConf.toUserPayload()),
        1, Resource.newInstance(1024, 1));
    dag.addVertex(vertex);

    TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig());
    Path remoteStagingDir = remoteFs.makeQualified(new Path("/tmp",
        String.valueOf(random.nextInt(100000))));
    remoteFs.mkdirs(remoteStagingDir);
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, remoteStagingDir.toString());

    TezClient tezSession = TezClient.create("TezSleepProcessor", tezConf, false);
    tezSession.start();

    DAGClient dagClient = tezSession.submitDAG(dag);

    DAGStatus dagStatus = dagClient.getDAGStatus(null);
    while (!dagStatus.isCompleted()) {
      LOG.info("Waiting for job to complete. Sleeping for 500ms."
          + " Current state: " + dagStatus.getState());
      Thread.sleep(500l);
      dagStatus = dagClient.getDAGStatus(null);
      // Memory usage must not be reported unless explicitly requested via StatusGetOpts.
      assertTrue("Memory used by AM is supposed to be 0 if not requested",
          dagStatus.getMemoryUsedByAM() == 0);
      assertTrue("Memory used by tasks is supposed to be 0 if not requested",
          dagStatus.getMemoryUsedByTasks() == 0);
    }

    // Re-fetch with counters + memory usage requested; now both must be present.
    dagStatus = dagClient.getDAGStatus(Sets.newHashSet(StatusGetOpts.GET_COUNTERS,
        StatusGetOpts.GET_MEMORY_USAGE));
    assertEquals(DAGStatus.State.SUCCEEDED, dagStatus.getState());
    assertNotNull(dagStatus.getDAGCounters());
    assertNotNull(dagStatus.getDAGCounters().getGroup(FileSystemCounter.class.getName()));
    assertNotNull(dagStatus.getDAGCounters().findCounter(TaskCounter.GC_TIME_MILLIS));
    assertTrue("Memory used by AM is supposed to be >0", dagStatus.getMemoryUsedByAM() > 0);
    assertTrue("Memory used by tasks is supposed to be >0", dagStatus.getMemoryUsedByTasks() > 0);
    ExampleDriver.printDAGStatus(dagClient, new String[] { "SleepVertex" }, true, true);
    tezSession.stop();
  }

  /**
   * Same sleep DAG as {@link #testSleepJob()}, but with the AM staging directory on the
   * local file system rather than the cluster's default (HDFS) file system.
   */
  @Test(timeout = 60000)
  public void testNonDefaultFSStagingDir() throws Exception {
    SleepProcessorConfig spConf = new SleepProcessorConfig(1);

    DAG dag = DAG.create("DAG-testNonDefaultFSStagingDir");
    Vertex vertex = Vertex.create("SleepVertex", ProcessorDescriptor.create(
        SleepProcessor.class.getName()).setUserPayload(spConf.toUserPayload()),
        1, Resource.newInstance(1024, 1));
    dag.addVertex(vertex);

    TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig());
    // Staging dir on localFs instead of remoteFs — the point of this test.
    Path stagingDir = new Path(TEST_ROOT_DIR, "testNonDefaultFSStagingDir"
        + String.valueOf(random.nextInt(100000)));
    FileSystem localFs = FileSystem.getLocal(tezConf);
    stagingDir = localFs.makeQualified(stagingDir);
    localFs.mkdirs(stagingDir);
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDir.toString());

    TezClient tezSession = TezClient.create("TezSleepProcessor", tezConf, false);
    tezSession.start();

    DAGClient dagClient = tezSession.submitDAG(dag);

    DAGStatus dagStatus = dagClient.getDAGStatus(null);
    while (!dagStatus.isCompleted()) {
      LOG.info("Waiting for job to complete. Sleeping for 500ms."
          + " Current state: " + dagStatus.getState());
      Thread.sleep(500l);
      dagStatus = dagClient.getDAGStatus(null);
    }

    dagStatus = dagClient.getDAGStatus(Sets.newHashSet(StatusGetOpts.GET_COUNTERS));
    assertEquals(DAGStatus.State.SUCCEEDED, dagStatus.getState());
    assertNotNull(dagStatus.getDAGCounters());
    assertNotNull(dagStatus.getDAGCounters().getGroup(FileSystemCounter.class.getName()));
    assertNotNull(dagStatus.getDAGCounters().findCounter(TaskCounter.GC_TIME_MILLIS));
    ExampleDriver.printDAGStatus(dagClient, new String[] { "SleepVertex" }, true, true);
    tezSession.stop();
  }

  // Runs a 2-task sleep DAG with SimpleHistoryLoggingService writing to a local
  // directory, then verifies a non-empty history log file was produced.
  @Test(timeout = 60000)
  public void testHistoryLogging() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    SleepProcessorConfig spConf = new SleepProcessorConfig(1);

    DAG dag = DAG.create("TezSleepProcessorHistoryLogging");
    Vertex vertex = Vertex.create("SleepVertex", ProcessorDescriptor.create(
        SleepProcessor.class.getName()).setUserPayload(spConf.toUserPayload()),
        2, Resource.newInstance(1024, 1));
    dag.addVertex(vertex);

    TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig());
    Path remoteStagingDir = remoteFs.makeQualified(new Path("/tmp",
        String.valueOf(random.nextInt(100000))));
    remoteFs.mkdirs(remoteStagingDir);
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, remoteStagingDir.toString());

    // History logs go to a local directory so we can inspect them after the run.
    FileSystem localFs = FileSystem.getLocal(tezConf);
    Path historyLogDir = new Path(TEST_ROOT_DIR, "testHistoryLogging");
    localFs.mkdirs(historyLogDir);
    tezConf.set(TezConfiguration.TEZ_SIMPLE_HISTORY_LOGGING_DIR,
        localFs.makeQualified(historyLogDir).toString());

    tezConf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, false);
    TezClient tezSession = TezClient.create("TezSleepProcessorHistoryLogging", tezConf);
    tezSession.start();

    DAGClient dagClient = tezSession.submitDAG(dag);

    DAGStatus dagStatus = dagClient.getDAGStatus(null);
    while (!dagStatus.isCompleted()) {
      LOG.info("Waiting for job to complete. Sleeping for 500ms."
          + " Current state: " + dagStatus.getState());
      Thread.sleep(500l);
      dagStatus = dagClient.getDAGStatus(null);
    }
    assertEquals(DAGStatus.State.SUCCEEDED, dagStatus.getState());

    // Find the history log file by its well-known prefix and check it is non-empty.
    FileStatus historyLogFileStatus = null;
    for (FileStatus fileStatus : localFs.listStatus(historyLogDir)) {
      if (fileStatus.isDirectory()) {
        continue;
      }
      Path p = fileStatus.getPath();
      if (p.getName().startsWith(SimpleHistoryLoggingService.LOG_FILE_NAME_PREFIX)) {
        historyLogFileStatus = fileStatus;
        break;
      }
    }
    Assert.assertNotNull(historyLogFileStatus);
    Assert.assertTrue(historyLogFileStatus.getLen() > 0);
    tezSession.stop();
  }

  // Submits a simple 5 stage sleep job using the DAG submit API instead of job
  // client.
  @Test(timeout = 60000)
  public void testMRRSleepJobDagSubmit() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    State finalState = testMRRSleepJobDagSubmitCore(false, false, false, false);

    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
    // TODO Add additional checks for tracking URL etc. - once it's exposed by
    // the DAG API.
  }

  // Submits a simple 5 stage sleep job using the DAG submit API. Then kills it.
  @Test(timeout = 60000)
  public void testMRRSleepJobDagSubmitAndKill() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    State finalState = testMRRSleepJobDagSubmitCore(false, true, false, false);

    Assert.assertEquals(DAGStatus.State.KILLED, finalState);
    // TODO Add additional checks for tracking URL etc. - once it's exposed by
    // the DAG API.
  }

  // Submits a DAG to AM via RPC after AM has started
  @Test(timeout = 60000)
  public void testMRRSleepJobViaSession() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    State finalState = testMRRSleepJobDagSubmitCore(true, false, false, false);

    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
  }

  // Submit 2 jobs via RPC using a custom initializer. The second job is submitted with an
  // additional local resource, which is verified by the initializer.
  @Test(timeout = 120000)
  public void testAMRelocalization() throws Exception {
    Path relocPath = new Path("/tmp/relocalizationfilefound");
    if (remoteFs.exists(relocPath)) {
      remoteFs.delete(relocPath, true);
    }

    TezClient tezSession = createTezSession();

    // First DAG: the marker class is not localized yet, so the initializer must
    // NOT create the marker path.
    State finalState = testMRRSleepJobDagSubmitCore(true, false, false,
        tezSession, true, MRInputAMSplitGeneratorRelocalizationTest.class, null);
    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
    Assert.assertFalse(remoteFs.exists(new Path("/tmp/relocalizationfilefound")));

    // Start the second job with some additional resources.

    // Create a test jar directly to HDFS
    LOG.info("Creating jar for relocalization test");
    Path relocFilePath = new Path("/tmp/test.jar");
    relocFilePath = remoteFs.makeQualified(relocFilePath);
    OutputStream os = remoteFs.create(relocFilePath, true);
    createTestJar(os, RELOCALIZATION_TEST_CLASS_NAME);

    // Also upload one of Tez's own JARs to HDFS and add as resource; should be ignored
    Path tezAppJar = new Path(MiniTezCluster.APPJAR);
    Path tezAppJarRemote = remoteFs.makeQualified(new Path("/tmp/" + tezAppJar.getName()));
    remoteFs.copyFromLocalFile(tezAppJar, tezAppJarRemote);

    Map<String, LocalResource> additionalResources = new HashMap<String, LocalResource>();
    additionalResources.put("test.jar", createLrObjFromPath(relocFilePath));
    additionalResources.put("TezAppJar.jar", createLrObjFromPath(tezAppJarRemote));

    Assert.assertEquals(TezAppMasterStatus.READY, tezSession.getAppMasterStatus());
    // Second DAG: with test.jar localized, the initializer finds the class and
    // creates the marker path — proving re-localization took effect.
    finalState = testMRRSleepJobDagSubmitCore(true, false, false,
        tezSession, true, MRInputAMSplitGeneratorRelocalizationTest.class,
        additionalResources);
    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
    Assert.assertEquals(TezAppMasterStatus.READY, tezSession.getAppMasterStatus());
    Assert.assertTrue(remoteFs.exists(new Path("/tmp/relocalizationfilefound")));

    stopAndVerifyYarnApp(tezSession);
  }

  /**
   * Stops the session, then polls YARN until the app reaches a terminal state and
   * asserts it FINISHED with SUCCEEDED final status.
   *
   * <p>NOTE(review): the poll loop has no sleep, so it busy-spins against the RM
   * until the app terminates — consider adding a short Thread.sleep per iteration.
   */
  private void stopAndVerifyYarnApp(TezClient tezSession) throws TezException,
      IOException, YarnException {
    ApplicationId appId = tezSession.getAppMasterApplicationId();
    tezSession.stop();
    Assert.assertEquals(TezAppMasterStatus.SHUTDOWN, tezSession.getAppMasterStatus());

    YarnClient yarnClient = YarnClient.createYarnClient();

    yarnClient.init(mrrTezCluster.getConfig());
    yarnClient.start();

    while (true) {
      ApplicationReport appReport = yarnClient.getApplicationReport(appId);
      if (appReport.getYarnApplicationState().equals(
          YarnApplicationState.FINISHED)
          || appReport.getYarnApplicationState().equals(
              YarnApplicationState.FAILED)
          || appReport.getYarnApplicationState().equals(
              YarnApplicationState.KILLED)) {
        break;
      }
    }

    ApplicationReport appReport = yarnClient.getApplicationReport(appId);
    Assert.assertEquals(YarnApplicationState.FINISHED,
        appReport.getYarnApplicationState());
    Assert.assertEquals(FinalApplicationStatus.SUCCEEDED,
        appReport.getFinalApplicationStatus());
  }

  /**
   * Localizes a bogus jar under the name "TezAppJar.jar" (conflicting with the real
   * Tez app jar) and expects the second DAG submission to fail.
   */
  @Test(timeout = 120000)
  public void testAMRelocalizationConflict() throws Exception {
    Path relocPath = new Path("/tmp/relocalizationfilefound");
    if (remoteFs.exists(relocPath)) {
      remoteFs.delete(relocPath, true);
    }

    // Run a DAG w/o a file.
    TezClient tezSession = createTezSession();
    State finalState = testMRRSleepJobDagSubmitCore(true, false, false,
        tezSession, true, MRInputAMSplitGeneratorRelocalizationTest.class, null);
    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
    Assert.assertFalse(remoteFs.exists(relocPath));

    // Create a bogus TezAppJar directly to HDFS
    LOG.info("Creating jar for relocalization test");
    Path tezAppJar = new Path(MiniTezCluster.APPJAR);
    Path tezAppJarRemote = remoteFs.makeQualified(new Path("/tmp/" + tezAppJar.getName()));
    OutputStream os = remoteFs.create(tezAppJarRemote, true);
    createTestJar(os, RELOCALIZATION_TEST_CLASS_NAME);

    Map<String, LocalResource> additionalResources = new HashMap<String, LocalResource>();
    additionalResources.put("TezAppJar.jar", createLrObjFromPath(tezAppJarRemote));

    try {
      testMRRSleepJobDagSubmitCore(true, false, false,
          tezSession, true, MRInputAMSplitGeneratorRelocalizationTest.class,
          additionalResources);
      Assert.fail("should have failed");
    } catch (Exception ex) {
      // expected — conflicting resource name must be rejected
    }

    stopAndVerifyYarnApp(tezSession);
  }

  /** Builds a PRIVATE FILE LocalResource for the given path (size/timestamp left 0). */
  private LocalResource createLrObjFromPath(Path filePath) {
    return LocalResource.newInstance(ConverterUtils.getYarnUrlFromPath(filePath),
        LocalResourceType.FILE, LocalResourceVisibility.PRIVATE, 0, 0);
  }

  /** Creates and starts a session-mode TezClient with a fresh HDFS staging dir. */
  private TezClient createTezSession() throws IOException, TezException {
    Path remoteStagingDir = remoteFs.makeQualified(new Path("/tmp", String
        .valueOf(new Random().nextInt(100000))));
    remoteFs.mkdirs(remoteStagingDir);
    TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig());
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR,
        remoteStagingDir.toString());
    TezClient tezSession = TezClient.create("testrelocalizationsession", tezConf, true);
    tezSession.start();
    Assert.assertEquals(TezAppMasterStatus.INITIALIZING, tezSession.getAppMasterStatus());
    return tezSession;
  }

  // Submits two DAGs back-to-back to the same session AM via RPC, checking the AM
  // returns to READY between submissions.
  @Test(timeout = 120000)
  public void testMultipleMRRSleepJobViaSession() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    Path remoteStagingDir = remoteFs.makeQualified(new Path("/tmp", String
        .valueOf(new Random().nextInt(100000))));
    remoteFs.mkdirs(remoteStagingDir);
    TezConfiguration tezConf = new TezConfiguration(
        mrrTezCluster.getConfig());
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR,
        remoteStagingDir.toString());

    TezClient tezSession = TezClient.create("testsession", tezConf, true);
    tezSession.start();
    Assert.assertEquals(TezAppMasterStatus.INITIALIZING,
        tezSession.getAppMasterStatus());

    State finalState = testMRRSleepJobDagSubmitCore(true, false, false,
        tezSession, false, null, null);
    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
    Assert.assertEquals(TezAppMasterStatus.READY,
        tezSession.getAppMasterStatus());
    finalState = testMRRSleepJobDagSubmitCore(true, false, false,
        tezSession, false, null, null);
    Assert.assertEquals(DAGStatus.State.SUCCEEDED, finalState);
    Assert.assertEquals(TezAppMasterStatus.READY,
        tezSession.getAppMasterStatus());

    stopAndVerifyYarnApp(tezSession);
  }

  // Submits a simple 5 stage sleep job using tez session. Then kills it.
  @Test(timeout = 60000)
  public void testMRRSleepJobDagSubmitAndKillViaRPC() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    State finalState = testMRRSleepJobDagSubmitCore(true, true, false, false);

    Assert.assertEquals(DAGStatus.State.KILLED, finalState);
    // TODO Add additional checks for tracking URL etc. - once it's exposed by
    // the DAG API.
  }

  // Create and close a tez session without submitting a job
  @Test(timeout = 60000)
  public void testTezSessionShutdown() throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    testMRRSleepJobDagSubmitCore(true, false, true, false);
  }

  // Exercises AM-side split generation (MRInputAMSplitGenerator) instead of
  // client-side legacy split generation.
  @Test(timeout = 60000)
  public void testAMSplitGeneration() throws IOException, InterruptedException,
      TezException, ClassNotFoundException, YarnException {
    testMRRSleepJobDagSubmitCore(true, false, false, true);
  }

  /**
   * Convenience overload: no reused session, default initializer, no extra
   * local resources. See the 7-arg overload for parameter semantics.
   */
  public State testMRRSleepJobDagSubmitCore(
      boolean dagViaRPC,
      boolean killDagWhileRunning,
      boolean closeSessionBeforeSubmit,
      boolean genSplitsInAM) throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    return testMRRSleepJobDagSubmitCore(dagViaRPC, killDagWhileRunning,
        closeSessionBeforeSubmit, null, genSplitsInAM, null, null);
  }

  /**
   * Core driver: builds a 3-vertex map→ireduce→reduce sleep DAG and runs it.
   *
   * @param dagViaRPC submit the DAG via RPC to a session AM (true) or launch a
   *        non-session AM with the DAG (false)
   * @param killDagWhileRunning once RUNNING, kill the DAG (or stop the session
   *        when dagViaRPC is true)
   * @param closeSessionBeforeSubmit stop the session as soon as the AM is RUNNING,
   *        without submitting; returns null after verifying clean shutdown
   * @param reUseTezSession an already-started session to submit into, or null to
   *        create one here (only valid with dagViaRPC)
   * @param genSplitsInAM generate input splits in the AM rather than via legacy
   *        client-side split generation
   * @param initializerClass custom InputInitializer to use for AM split generation,
   *        or null for the default MRInputAMSplitGenerator
   * @param additionalLocalResources extra AM local resources to add before submit,
   *        or null
   * @return the DAG's final state, or null when closeSessionBeforeSubmit is true
   */
  public State testMRRSleepJobDagSubmitCore(
      boolean dagViaRPC,
      boolean killDagWhileRunning,
      boolean closeSessionBeforeSubmit,
      TezClient reUseTezSession,
      boolean genSplitsInAM,
      Class<? extends InputInitializer> initializerClass,
      Map<String, LocalResource> additionalLocalResources) throws IOException,
      InterruptedException, TezException, ClassNotFoundException, YarnException {
    LOG.info("\n\n\nStarting testMRRSleepJobDagSubmit().");

    JobConf stage1Conf = new JobConf(mrrTezCluster.getConfig());
    JobConf stage2Conf = new JobConf(mrrTezCluster.getConfig());
    JobConf stage3Conf = new JobConf(mrrTezCluster.getConfig());

    // Stage 1: single sleep map task.
    stage1Conf.setLong(MRRSleepJob.MAP_SLEEP_TIME, 1);
    stage1Conf.setInt(MRRSleepJob.MAP_SLEEP_COUNT, 1);
    stage1Conf.setInt(MRJobConfig.NUM_MAPS, 1);
    stage1Conf.set(MRJobConfig.MAP_CLASS_ATTR, SleepMapper.class.getName());
    stage1Conf.set(MRJobConfig.MAP_OUTPUT_KEY_CLASS, IntWritable.class.getName());
    stage1Conf.set(MRJobConfig.MAP_OUTPUT_VALUE_CLASS, IntWritable.class.getName());
    stage1Conf.set(MRJobConfig.INPUT_FORMAT_CLASS_ATTR, SleepInputFormat.class.getName());
    stage1Conf.set(MRJobConfig.PARTITIONER_CLASS_ATTR, MRRSleepJobPartitioner.class.getName());

    // Stage 2: intermediate reduce (ISleepReducer).
    stage2Conf.setLong(MRRSleepJob.REDUCE_SLEEP_TIME, 1);
    stage2Conf.setInt(MRRSleepJob.REDUCE_SLEEP_COUNT, 1);
    stage2Conf.setInt(MRJobConfig.NUM_REDUCES, 1);
    stage2Conf.set(MRJobConfig.REDUCE_CLASS_ATTR, ISleepReducer.class.getName());
    stage2Conf.set(MRJobConfig.MAP_OUTPUT_KEY_CLASS, IntWritable.class.getName());
    stage2Conf.set(MRJobConfig.MAP_OUTPUT_VALUE_CLASS, IntWritable.class.getName());
    stage2Conf.set(MRJobConfig.PARTITIONER_CLASS_ATTR, MRRSleepJobPartitioner.class.getName());

    // Stage 3: final reduce.
    stage3Conf.setLong(MRRSleepJob.REDUCE_SLEEP_TIME, 1);
    stage3Conf.setInt(MRRSleepJob.REDUCE_SLEEP_COUNT, 1);
    stage3Conf.setInt(MRJobConfig.NUM_REDUCES, 1);
    stage3Conf.set(MRJobConfig.REDUCE_CLASS_ATTR, SleepReducer.class.getName());
    stage3Conf.set(MRJobConfig.MAP_OUTPUT_KEY_CLASS, IntWritable.class.getName());
    stage3Conf.set(MRJobConfig.MAP_OUTPUT_VALUE_CLASS, IntWritable.class.getName());

    MRHelpers.translateMRConfToTez(stage1Conf);
    MRHelpers.translateMRConfToTez(stage2Conf);
    MRHelpers.translateMRConfToTez(stage3Conf);
    MRHelpers.configureMRApiUsage(stage1Conf);
    MRHelpers.configureMRApiUsage(stage2Conf);
    MRHelpers.configureMRApiUsage(stage3Conf);

    Path remoteStagingDir = remoteFs.makeQualified(new Path("/tmp", String
        .valueOf(new Random().nextInt(100000))));
    // Uses the class-level 'conf' (DFS config), not the Tez config built below.
    TezClientUtils.ensureStagingDirExists(conf, remoteStagingDir);

    UserPayload stage1Payload = TezUtils.createUserPayloadFromConf(stage1Conf);
    UserPayload stage2Payload = TezUtils.createUserPayloadFromConf(stage2Conf);
    UserPayload stage3Payload = TezUtils.createUserPayloadFromConf(stage3Conf);

    DAG dag = DAG.create("testMRRSleepJobDagSubmit-" + random.nextInt(1000));

    Class<? extends InputInitializer> inputInitializerClazz = genSplitsInAM
        ? (initializerClass == null ? MRInputAMSplitGenerator.class : initializerClass)
        : null;
    LOG.info("Using initializer class: " + initializerClass);

    DataSourceDescriptor dsd;
    if (!genSplitsInAM) {
      // Legacy: generate splits on the client and stage them in the staging dir.
      dsd = MRInputHelpers
          .configureMRInputWithLegacySplitGeneration(stage1Conf, remoteStagingDir, true);
    } else {
      if (initializerClass == null) {
        dsd = MRInputLegacy.createConfigBuilder(stage1Conf, SleepInputFormat.class).build();
      } else {
        InputInitializerDescriptor iid =
            InputInitializerDescriptor.create(inputInitializerClazz.getName());
        dsd = MRInputLegacy.createConfigBuilder(stage1Conf, SleepInputFormat.class)
            .setCustomInitializerDescriptor(iid).build();
      }
    }

    Vertex stage1Vertex = Vertex.create("map", ProcessorDescriptor.create(
        MapProcessor.class.getName()).setUserPayload(stage1Payload),
        dsd.getNumberOfShards(), Resource.newInstance(256, 1));
    stage1Vertex.addDataSource("MRInput", dsd);
    Vertex stage2Vertex = Vertex.create("ireduce", ProcessorDescriptor.create(
        ReduceProcessor.class.getName()).setUserPayload(stage2Payload), 1,
        Resource.newInstance(256, 1));
    Vertex stage3Vertex = Vertex.create("reduce", ProcessorDescriptor.create(
        ReduceProcessor.class.getName()).setUserPayload(stage3Payload), 1,
        Resource.newInstance(256, 1));
    stage3Conf.setBoolean(TezRuntimeConfiguration.TEZ_RUNTIME_CONVERT_USER_PAYLOAD_TO_HISTORY_TEXT,
        true);
    DataSinkDescriptor dataSinkDescriptor =
        MROutputLegacy.createConfigBuilder(stage3Conf, NullOutputFormat.class).build();
    Assert.assertFalse(dataSinkDescriptor.getOutputDescriptor().getHistoryText().isEmpty());
    stage3Vertex.addDataSink("MROutput", dataSinkDescriptor);

    // TODO env, resources

    dag.addVertex(stage1Vertex);
    dag.addVertex(stage2Vertex);
    dag.addVertex(stage3Vertex);

    // Wire the three stages with scatter-gather (shuffle) edges.
    Edge edge1 = Edge.create(stage1Vertex, stage2Vertex, EdgeProperty.create(
        DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
        SchedulingType.SEQUENTIAL, OutputDescriptor.create(
            OrderedPartitionedKVOutput.class.getName()).setUserPayload(stage2Payload),
        InputDescriptor.create(
            OrderedGroupedInputLegacy.class.getName()).setUserPayload(stage2Payload)));
    Edge edge2 = Edge.create(stage2Vertex, stage3Vertex, EdgeProperty.create(
        DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
        SchedulingType.SEQUENTIAL, OutputDescriptor.create(
            OrderedPartitionedKVOutput.class.getName()).setUserPayload(stage3Payload),
        InputDescriptor.create(
            OrderedGroupedInputLegacy.class.getName()).setUserPayload(stage3Payload)));

    dag.addEdge(edge1);
    dag.addEdge(edge2);

    TezConfiguration tezConf = new TezConfiguration(
        mrrTezCluster.getConfig());
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR,
        remoteStagingDir.toString());

    DAGClient dagClient = null;
    boolean reuseSession = reUseTezSession != null;
    TezClient tezSession = null;
    if (!dagViaRPC) {
      // Session reuse only makes sense for RPC submission.
      Preconditions.checkArgument(reuseSession == false);
    }
    if (!reuseSession) {
      TezConfiguration tempTezconf = new TezConfiguration(tezConf);
      if (!dagViaRPC) {
        tempTezconf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, false);
      } else {
        tempTezconf.setBoolean(TezConfiguration.TEZ_AM_SESSION_MODE, true);
      }
      tezSession = TezClient.create("testsession", tempTezconf);
      tezSession.start();
    } else {
      tezSession = reUseTezSession;
    }
    if (!dagViaRPC) {
      // TODO Use utility method post TEZ-205 to figure out AM arguments etc.
      dagClient = tezSession.submitDAG(dag);
    }

    if (dagViaRPC && closeSessionBeforeSubmit) {
      // Stop the session once the AM is RUNNING (no DAG submitted), then verify
      // YARN reports a clean FINISHED/SUCCEEDED shutdown.
      YarnClient yarnClient = YarnClient.createYarnClient();
      yarnClient.init(mrrTezCluster.getConfig());
      yarnClient.start();
      boolean sentKillSession = false;
      while (true) {
        Thread.sleep(500l);
        ApplicationReport appReport = yarnClient.getApplicationReport(
            tezSession.getAppMasterApplicationId());
        if (appReport == null) {
          continue;
        }
        YarnApplicationState appState = appReport.getYarnApplicationState();
        if (!sentKillSession) {
          if (appState == YarnApplicationState.RUNNING) {
            tezSession.stop();
            sentKillSession = true;
          }
        } else {
          if (appState == YarnApplicationState.FINISHED
              || appState == YarnApplicationState.KILLED
              || appState == YarnApplicationState.FAILED) {
            LOG.info("Application completed after sending session shutdown"
                + ", yarnApplicationState=" + appState
                + ", finalAppStatus=" + appReport.getFinalApplicationStatus());
            Assert.assertEquals(YarnApplicationState.FINISHED, appState);
            Assert.assertEquals(FinalApplicationStatus.SUCCEEDED,
                appReport.getFinalApplicationStatus());
            break;
          }
        }
      }
      yarnClient.stop();
      return null;
    }
    if (dagViaRPC) {
      LOG.info("Submitting dag to tez session with appId="
          + tezSession.getAppMasterApplicationId() + " and Dag Name=" + dag.getName());
      if (additionalLocalResources != null) {
        tezSession.addAppMasterLocalFiles(additionalLocalResources);
      }
      dagClient = tezSession.submitDAG(dag);
      Assert.assertEquals(TezAppMasterStatus.RUNNING, tezSession.getAppMasterStatus());
    }
    DAGStatus dagStatus = dagClient.getDAGStatus(null);
    while (!dagStatus.isCompleted()) {
      LOG.info("Waiting for job to complete. Sleeping for 500ms."
          + " Current state: " + dagStatus.getState());
      Thread.sleep(500l);
      if (killDagWhileRunning
          && dagStatus.getState() == DAGStatus.State.RUNNING) {
        LOG.info("Killing running dag/session");
        if (dagViaRPC) {
          tezSession.stop();
        } else {
          dagClient.tryKillDAG();
        }
      }
      dagStatus = dagClient.getDAGStatus(null);
    }
    if (!reuseSession) {
      tezSession.stop();
    }
    return dagStatus.getState();
  }

  /**
   * Builds a LocalResource from a file's actual status (size, mtime).
   *
   * <p>NOTE(review): appears unused within this file — verify callers before
   * removing.
   */
  private static LocalResource createLocalResource(FileSystem fc, Path file,
      LocalResourceType type, LocalResourceVisibility visibility)
      throws IOException {
    FileStatus fstat = fc.getFileStatus(file);
    URL resourceURL = ConverterUtils.getYarnUrlFromPath(fc.resolvePath(fstat
        .getPath()));
    long resourceSize = fstat.getLen();
    long resourceModificationTime = fstat.getModificationTime();

    return LocalResource.newInstance(resourceURL, type, visibility,
        resourceSize, resourceModificationTime);
  }

  // Runs the UnionExample (vertex groups) end-to-end over a small generated input.
  @Test(timeout = 60000)
  public void testVertexGroups() throws Exception {
    LOG.info("Running Group Test");
    Path inPath = new Path(TEST_ROOT_DIR, "in-groups");
    Path outPath = new Path(TEST_ROOT_DIR, "out-groups");

    FSDataOutputStream out = remoteFs.create(inPath);
    OutputStreamWriter writer = new OutputStreamWriter(out);
    writer.write("abcd ");
    writer.write("efgh ");
    writer.write("abcd ");
    writer.write("efgh ");
    writer.close();
    out.close();

    UnionExample job = new UnionExample();
    if (job.run(inPath.toString(), outPath.toString(), mrrTezCluster.getConfig())) {
      LOG.info("Success VertexGroups Test");
    } else {
      throw new TezUncheckedException("VertexGroups Test Failed");
    }
  }

  // Runs the BroadcastAndOneToOneExample end-to-end.
  @Test(timeout = 60000)
  public void testBroadcastAndOneToOne() throws Exception {
    LOG.info("Running BroadcastAndOneToOne Test");
    BroadcastAndOneToOneExample job = new BroadcastAndOneToOneExample();
    if (job.run(mrrTezCluster.getConfig(), true)) {
      LOG.info("Success BroadcastAndOneToOne Test");
    } else {
      throw new TezUncheckedException("BroadcastAndOneToOne Test Failed");
    }
  }

  // This class should not be used by more than one test in a single run, since
  // the path it writes to is not dynamic.
  private static String RELOCALIZATION_TEST_CLASS_NAME = "AMClassloadTestDummyClass";

  /**
   * Split generator that probes whether RELOCALIZATION_TEST_CLASS_NAME is loadable
   * in the AM; if found, it creates the marker path /tmp/relocalizationfilefound,
   * which the relocalization tests assert on.
   */
  public static class MRInputAMSplitGeneratorRelocalizationTest extends MRInputAMSplitGenerator {

    public MRInputAMSplitGeneratorRelocalizationTest(
        InputInitializerContext initializerContext) {
      super(initializerContext);
    }

    @Override
    public List<Event> initialize() throws Exception {
      MRInputUserPayloadProto userPayloadProto = MRInputHelpers
          .parseMRInputPayload(getContext().getInputUserPayload());
      Configuration conf = TezUtils.createConfFromByteString(userPayloadProto
          .getConfigurationBytes());

      try {
        Thread.currentThread().setContextClassLoader(TezClassLoader.getInstance());
        ReflectionUtils.getClazz(RELOCALIZATION_TEST_CLASS_NAME);
        LOG.info("Class found");
        FileSystem fs = FileSystem.get(conf);
        // Marker path; checked by testAMRelocalization / testAMRelocalizationConflict.
        fs.mkdirs(new Path("/tmp/relocalizationfilefound"));
      } catch (TezReflectionException e) {
        LOG.info("Class not found");
      }

      return super.initialize();
    }
  }

  /**
   * Compiles a trivial dummy class in-memory and writes it as a one-entry jar to
   * the given stream (the stream is closed via the wrapping JarOutputStream).
   *
   * <p>NOTE(review): the intermediate InputStream/JarOutputStream are not closed
   * on exception paths — try-with-resources would be safer.
   */
  private static void createTestJar(OutputStream outStream, String dummyClassName)
      throws URISyntaxException, IOException {
    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    JavaFileObject srcFileObject = new SimpleJavaFileObjectImpl(
        URI.create("string:///" + dummyClassName + Kind.SOURCE.extension), Kind.SOURCE);
    StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null,
        null);
    compiler.getTask(null, fileManager, null, null, null,
        Collections.singletonList(srcFileObject)).call();

    JavaFileObject javaFileObject = fileManager.getJavaFileForOutput(
        StandardLocation.CLASS_OUTPUT, dummyClassName, Kind.CLASS, null);

    File classFile = new File(dummyClassName + Kind.CLASS.extension);

    JarOutputStream jarOutputStream = new JarOutputStream(outStream);
    JarEntry jarEntry = new JarEntry(classFile.getName());
    jarEntry.setTime(classFile.lastModified());
    jarOutputStream.putNextEntry(jarEntry);

    InputStream in = javaFileObject.openInputStream();
    byte buffer[] = new byte[4096];
    while (true) {
      int nRead = in.read(buffer, 0, buffer.length);
      if (nRead <= 0)
        break;
      jarOutputStream.write(buffer, 0, nRead);
    }
    in.close();
    jarOutputStream.close();
    javaFileObject.delete();
  }

  /** In-memory Java source "file" holding the dummy class used by createTestJar. */
  private static class SimpleJavaFileObjectImpl extends SimpleJavaFileObject {
    static final String code = "public class AMClassloadTestDummyClass {}";

    SimpleJavaFileObjectImpl(URI uri, Kind kind) {
      super(uri, kind);
    }

    @Override
    public CharSequence getCharContent(boolean ignoreEncodingErrors) {
      return code;
    }
  }
}
googleapis/google-cloud-java
37,840
java-video-live-stream/proto-google-cloud-live-stream-v1/src/main/java/com/google/cloud/video/livestream/v1/AutoTranscriptionConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/video/livestream/v1/resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.video.livestream.v1; /** * * * <pre> * Advanced configurations for auto-generated text streams. * </pre> * * Protobuf type {@code google.cloud.video.livestream.v1.AutoTranscriptionConfig} */ public final class AutoTranscriptionConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.video.livestream.v1.AutoTranscriptionConfig) AutoTranscriptionConfigOrBuilder { private static final long serialVersionUID = 0L; // Use AutoTranscriptionConfig.newBuilder() to construct. 
private AutoTranscriptionConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AutoTranscriptionConfig() { displayTiming_ = 0; qualityPreset_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AutoTranscriptionConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.livestream.v1.ResourcesProto .internal_static_google_cloud_video_livestream_v1_AutoTranscriptionConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.livestream.v1.ResourcesProto .internal_static_google_cloud_video_livestream_v1_AutoTranscriptionConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.class, com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.Builder.class); } /** * * * <pre> * Whether auto-generated text streams are displayed synchronously or * asynchronously with the original audio. * </pre> * * Protobuf enum {@code google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming} */ public enum DisplayTiming implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Display timing is not specified. Caption display will be asynchronous by * default. * </pre> * * <code>DISPLAY_TIMING_UNSPECIFIED = 0;</code> */ DISPLAY_TIMING_UNSPECIFIED(0), /** * * * <pre> * Caption will be displayed asynchronous with audio. * </pre> * * <code>ASYNC = 1;</code> */ ASYNC(1), /** * * * <pre> * Caption will be displayed synchronous with audio. This option increases * overall media output latency, and reduces viewing latency between audio * and auto-generated captions. * </pre> * * <code>SYNC = 2;</code> */ SYNC(2), UNRECOGNIZED(-1), ; /** * * * <pre> * Display timing is not specified. 
Caption display will be asynchronous by * default. * </pre> * * <code>DISPLAY_TIMING_UNSPECIFIED = 0;</code> */ public static final int DISPLAY_TIMING_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Caption will be displayed asynchronous with audio. * </pre> * * <code>ASYNC = 1;</code> */ public static final int ASYNC_VALUE = 1; /** * * * <pre> * Caption will be displayed synchronous with audio. This option increases * overall media output latency, and reduces viewing latency between audio * and auto-generated captions. * </pre> * * <code>SYNC = 2;</code> */ public static final int SYNC_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static DisplayTiming valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static DisplayTiming forNumber(int value) { switch (value) { case 0: return DISPLAY_TIMING_UNSPECIFIED; case 1: return ASYNC; case 2: return SYNC; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<DisplayTiming> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<DisplayTiming> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<DisplayTiming>() { public DisplayTiming findValueByNumber(int number) { return DisplayTiming.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.getDescriptor() .getEnumTypes() .get(0); } private static final DisplayTiming[] VALUES = values(); public static DisplayTiming valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private DisplayTiming(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming) } /** * * * <pre> * Presets to tune the latency and quality of auto-generated captions. 
* </pre> * * Protobuf enum {@code google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset} */ public enum QualityPreset implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Quality Preset is not specified. By default, BALANCED_QUALITY will be * used. * </pre> * * <code>QUALITY_PRESET_UNSPECIFIED = 0;</code> */ QUALITY_PRESET_UNSPECIFIED(0), /** * * * <pre> * Reduce the latency of auto-generated captions. This may reduce the * quality of the captions. * </pre> * * <code>LOW_LATENCY = 1;</code> */ LOW_LATENCY(1), /** * * * <pre> * Default behavior when QualityPreset is not specified. * </pre> * * <code>BALANCED_QUALITY = 2;</code> */ BALANCED_QUALITY(2), /** * * * <pre> * Increases the quality of the auto-generated captions at the cost of * higher latency. * </pre> * * <code>IMPROVED_QUALITY = 3;</code> */ IMPROVED_QUALITY(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Quality Preset is not specified. By default, BALANCED_QUALITY will be * used. * </pre> * * <code>QUALITY_PRESET_UNSPECIFIED = 0;</code> */ public static final int QUALITY_PRESET_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Reduce the latency of auto-generated captions. This may reduce the * quality of the captions. * </pre> * * <code>LOW_LATENCY = 1;</code> */ public static final int LOW_LATENCY_VALUE = 1; /** * * * <pre> * Default behavior when QualityPreset is not specified. * </pre> * * <code>BALANCED_QUALITY = 2;</code> */ public static final int BALANCED_QUALITY_VALUE = 2; /** * * * <pre> * Increases the quality of the auto-generated captions at the cost of * higher latency. * </pre> * * <code>IMPROVED_QUALITY = 3;</code> */ public static final int IMPROVED_QUALITY_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. 
* @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static QualityPreset valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static QualityPreset forNumber(int value) { switch (value) { case 0: return QUALITY_PRESET_UNSPECIFIED; case 1: return LOW_LATENCY; case 2: return BALANCED_QUALITY; case 3: return IMPROVED_QUALITY; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<QualityPreset> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<QualityPreset> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<QualityPreset>() { public QualityPreset findValueByNumber(int number) { return QualityPreset.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.getDescriptor() .getEnumTypes() .get(1); } private static final QualityPreset[] VALUES = values(); public static QualityPreset valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private QualityPreset(int value) { this.value = 
value; } // @@protoc_insertion_point(enum_scope:google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset) } public static final int DISPLAY_TIMING_FIELD_NUMBER = 1; private int displayTiming_ = 0; /** * * * <pre> * Optional. Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for displayTiming. */ @java.lang.Override public int getDisplayTimingValue() { return displayTiming_; } /** * * * <pre> * Optional. Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The displayTiming. */ @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming getDisplayTiming() { com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming result = com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming.forNumber( displayTiming_); return result == null ? com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming.UNRECOGNIZED : result; } public static final int QUALITY_PRESET_FIELD_NUMBER = 2; private int qualityPreset_ = 0; /** * * * <pre> * Optional. Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for qualityPreset. */ @java.lang.Override public int getQualityPresetValue() { return qualityPreset_; } /** * * * <pre> * Optional. 
Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The qualityPreset. */ @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset getQualityPreset() { com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset result = com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset.forNumber( qualityPreset_); return result == null ? com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (displayTiming_ != com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming .DISPLAY_TIMING_UNSPECIFIED .getNumber()) { output.writeEnum(1, displayTiming_); } if (qualityPreset_ != com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset .QUALITY_PRESET_UNSPECIFIED .getNumber()) { output.writeEnum(2, qualityPreset_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (displayTiming_ != com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming .DISPLAY_TIMING_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, displayTiming_); } if (qualityPreset_ != com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset .QUALITY_PRESET_UNSPECIFIED .getNumber()) { size += 
com.google.protobuf.CodedOutputStream.computeEnumSize(2, qualityPreset_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.video.livestream.v1.AutoTranscriptionConfig)) { return super.equals(obj); } com.google.cloud.video.livestream.v1.AutoTranscriptionConfig other = (com.google.cloud.video.livestream.v1.AutoTranscriptionConfig) obj; if (displayTiming_ != other.displayTiming_) return false; if (qualityPreset_ != other.qualityPreset_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISPLAY_TIMING_FIELD_NUMBER; hash = (53 * hash) + displayTiming_; hash = (37 * hash) + QUALITY_PRESET_FIELD_NUMBER; hash = (53 * hash) + qualityPreset_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.video.livestream.v1.AutoTranscriptionConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.video.livestream.v1.AutoTranscriptionConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Advanced configurations for auto-generated text streams. 
* </pre> * * Protobuf type {@code google.cloud.video.livestream.v1.AutoTranscriptionConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.video.livestream.v1.AutoTranscriptionConfig) com.google.cloud.video.livestream.v1.AutoTranscriptionConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.livestream.v1.ResourcesProto .internal_static_google_cloud_video_livestream_v1_AutoTranscriptionConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.livestream.v1.ResourcesProto .internal_static_google_cloud_video_livestream_v1_AutoTranscriptionConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.class, com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.Builder.class); } // Construct using com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; displayTiming_ = 0; qualityPreset_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.video.livestream.v1.ResourcesProto .internal_static_google_cloud_video_livestream_v1_AutoTranscriptionConfig_descriptor; } @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig getDefaultInstanceForType() { return com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig build() { 
com.google.cloud.video.livestream.v1.AutoTranscriptionConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig buildPartial() { com.google.cloud.video.livestream.v1.AutoTranscriptionConfig result = new com.google.cloud.video.livestream.v1.AutoTranscriptionConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.video.livestream.v1.AutoTranscriptionConfig result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.displayTiming_ = displayTiming_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.qualityPreset_ = qualityPreset_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.video.livestream.v1.AutoTranscriptionConfig) { return mergeFrom((com.google.cloud.video.livestream.v1.AutoTranscriptionConfig) other); } else { super.mergeFrom(other); return 
this; } } public Builder mergeFrom(com.google.cloud.video.livestream.v1.AutoTranscriptionConfig other) { if (other == com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.getDefaultInstance()) return this; if (other.displayTiming_ != 0) { setDisplayTimingValue(other.getDisplayTimingValue()); } if (other.qualityPreset_ != 0) { setQualityPresetValue(other.getQualityPresetValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { displayTiming_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 16: { qualityPreset_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int displayTiming_ = 0; /** * * * <pre> * Optional. Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for displayTiming. */ @java.lang.Override public int getDisplayTimingValue() { return displayTiming_; } /** * * * <pre> * Optional. 
Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for displayTiming to set. * @return This builder for chaining. */ public Builder setDisplayTimingValue(int value) { displayTiming_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The displayTiming. */ @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming getDisplayTiming() { com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming result = com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming.forNumber( displayTiming_); return result == null ? com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The displayTiming to set. * @return This builder for chaining. */ public Builder setDisplayTiming( com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; displayTiming_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. 
Whether auto-generated text streams are displayed synchronously * or asynchronously with the original audio. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.DisplayTiming display_timing = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearDisplayTiming() { bitField0_ = (bitField0_ & ~0x00000001); displayTiming_ = 0; onChanged(); return this; } private int qualityPreset_ = 0; /** * * * <pre> * Optional. Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for qualityPreset. */ @java.lang.Override public int getQualityPresetValue() { return qualityPreset_; } /** * * * <pre> * Optional. Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for qualityPreset to set. * @return This builder for chaining. */ public Builder setQualityPresetValue(int value) { qualityPreset_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The qualityPreset. */ @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset getQualityPreset() { com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset result = com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset.forNumber( qualityPreset_); return result == null ? 
com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The qualityPreset to set. * @return This builder for chaining. */ public Builder setQualityPreset( com.google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; qualityPreset_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. Tunes the latency and quality of auto-generated captions. * </pre> * * <code> * .google.cloud.video.livestream.v1.AutoTranscriptionConfig.QualityPreset quality_preset = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. */ public Builder clearQualityPreset() { bitField0_ = (bitField0_ & ~0x00000002); qualityPreset_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.video.livestream.v1.AutoTranscriptionConfig) } // @@protoc_insertion_point(class_scope:google.cloud.video.livestream.v1.AutoTranscriptionConfig) private static final com.google.cloud.video.livestream.v1.AutoTranscriptionConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.video.livestream.v1.AutoTranscriptionConfig(); } public static com.google.cloud.video.livestream.v1.AutoTranscriptionConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<AutoTranscriptionConfig> PARSER = new com.google.protobuf.AbstractParser<AutoTranscriptionConfig>() { @java.lang.Override public AutoTranscriptionConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AutoTranscriptionConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AutoTranscriptionConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.video.livestream.v1.AutoTranscriptionConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,841
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CreateEntityTypeRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/featurestore_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Request message for * [FeaturestoreService.CreateEntityType][google.cloud.aiplatform.v1.FeaturestoreService.CreateEntityType]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.CreateEntityTypeRequest} */ public final class CreateEntityTypeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CreateEntityTypeRequest) CreateEntityTypeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateEntityTypeRequest.newBuilder() to construct. 
private CreateEntityTypeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateEntityTypeRequest() { parent_ = ""; entityTypeId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateEntityTypeRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1_CreateEntityTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1_CreateEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.class, com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. * Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. 
* Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENTITY_TYPE_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1.EntityType entityType_; /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> * * @return Whether the entityType field is set. */ @java.lang.Override public boolean hasEntityType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> * * @return The entityType. */ @java.lang.Override public com.google.cloud.aiplatform.v1.EntityType getEntityType() { return entityType_ == null ? com.google.cloud.aiplatform.v1.EntityType.getDefaultInstance() : entityType_; } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.EntityTypeOrBuilder getEntityTypeOrBuilder() { return entityType_ == null ? com.google.cloud.aiplatform.v1.EntityType.getDefaultInstance() : entityType_; } public static final int ENTITY_TYPE_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object entityTypeId_ = ""; /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. 
* * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. The first character cannot be a number. * * The value must be unique within a featurestore. * </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The entityTypeId. */ @java.lang.Override public java.lang.String getEntityTypeId() { java.lang.Object ref = entityTypeId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); entityTypeId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. * * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. The first character cannot be a number. * * The value must be unique within a featurestore. * </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for entityTypeId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getEntityTypeIdBytes() { java.lang.Object ref = entityTypeId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); entityTypeId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getEntityType()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(entityTypeId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, entityTypeId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEntityType()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(entityTypeId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, entityTypeId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.CreateEntityTypeRequest)) { return 
super.equals(obj); } com.google.cloud.aiplatform.v1.CreateEntityTypeRequest other = (com.google.cloud.aiplatform.v1.CreateEntityTypeRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasEntityType() != other.hasEntityType()) return false; if (hasEntityType()) { if (!getEntityType().equals(other.getEntityType())) return false; } if (!getEntityTypeId().equals(other.getEntityTypeId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasEntityType()) { hash = (37 * hash) + ENTITY_TYPE_FIELD_NUMBER; hash = (53 * hash) + getEntityType().hashCode(); } hash = (37 * hash) + ENTITY_TYPE_ID_FIELD_NUMBER; hash = (53 * hash) + getEntityTypeId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.CreateEntityTypeRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [FeaturestoreService.CreateEntityType][google.cloud.aiplatform.v1.FeaturestoreService.CreateEntityType]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.CreateEntityTypeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CreateEntityTypeRequest) com.google.cloud.aiplatform.v1.CreateEntityTypeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1_CreateEntityTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1_CreateEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.class, com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.Builder.class); } // 
Construct using com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntityTypeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; entityType_ = null; if (entityTypeBuilder_ != null) { entityTypeBuilder_.dispose(); entityTypeBuilder_ = null; } entityTypeId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1_CreateEntityTypeRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.CreateEntityTypeRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.CreateEntityTypeRequest build() { com.google.cloud.aiplatform.v1.CreateEntityTypeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.CreateEntityTypeRequest buildPartial() { com.google.cloud.aiplatform.v1.CreateEntityTypeRequest result = new com.google.cloud.aiplatform.v1.CreateEntityTypeRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.CreateEntityTypeRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.entityType_ = entityTypeBuilder_ == null ? 
entityType_ : entityTypeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.entityTypeId_ = entityTypeId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.CreateEntityTypeRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.CreateEntityTypeRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.CreateEntityTypeRequest other) { if (other == com.google.cloud.aiplatform.v1.CreateEntityTypeRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasEntityType()) { mergeEntityType(other.getEntityType()); } if (!other.getEntityTypeId().isEmpty()) { entityTypeId_ = other.entityTypeId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { 
return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getEntityTypeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { entityTypeId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. * Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. 
* Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. * Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. * Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Featurestore to create EntityTypes. * Format: * `projects/{project}/locations/{location}/featurestores/{featurestore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. 
* @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.aiplatform.v1.EntityType entityType_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.EntityType, com.google.cloud.aiplatform.v1.EntityType.Builder, com.google.cloud.aiplatform.v1.EntityTypeOrBuilder> entityTypeBuilder_; /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> * * @return Whether the entityType field is set. */ public boolean hasEntityType() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> * * @return The entityType. */ public com.google.cloud.aiplatform.v1.EntityType getEntityType() { if (entityTypeBuilder_ == null) { return entityType_ == null ? com.google.cloud.aiplatform.v1.EntityType.getDefaultInstance() : entityType_; } else { return entityTypeBuilder_.getMessage(); } } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ public Builder setEntityType(com.google.cloud.aiplatform.v1.EntityType value) { if (entityTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entityType_ = value; } else { entityTypeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The EntityType to create. 
* </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ public Builder setEntityType( com.google.cloud.aiplatform.v1.EntityType.Builder builderForValue) { if (entityTypeBuilder_ == null) { entityType_ = builderForValue.build(); } else { entityTypeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ public Builder mergeEntityType(com.google.cloud.aiplatform.v1.EntityType value) { if (entityTypeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && entityType_ != null && entityType_ != com.google.cloud.aiplatform.v1.EntityType.getDefaultInstance()) { getEntityTypeBuilder().mergeFrom(value); } else { entityType_ = value; } } else { entityTypeBuilder_.mergeFrom(value); } if (entityType_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ public Builder clearEntityType() { bitField0_ = (bitField0_ & ~0x00000002); entityType_ = null; if (entityTypeBuilder_ != null) { entityTypeBuilder_.dispose(); entityTypeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ public com.google.cloud.aiplatform.v1.EntityType.Builder getEntityTypeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEntityTypeFieldBuilder().getBuilder(); } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ public com.google.cloud.aiplatform.v1.EntityTypeOrBuilder getEntityTypeOrBuilder() { if (entityTypeBuilder_ != null) { return entityTypeBuilder_.getMessageOrBuilder(); } else { return entityType_ == null ? 
com.google.cloud.aiplatform.v1.EntityType.getDefaultInstance() : entityType_; } } /** * * * <pre> * The EntityType to create. * </pre> * * <code>.google.cloud.aiplatform.v1.EntityType entity_type = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.EntityType, com.google.cloud.aiplatform.v1.EntityType.Builder, com.google.cloud.aiplatform.v1.EntityTypeOrBuilder> getEntityTypeFieldBuilder() { if (entityTypeBuilder_ == null) { entityTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.EntityType, com.google.cloud.aiplatform.v1.EntityType.Builder, com.google.cloud.aiplatform.v1.EntityTypeOrBuilder>( getEntityType(), getParentForChildren(), isClean()); entityType_ = null; } return entityTypeBuilder_; } private java.lang.Object entityTypeId_ = ""; /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. * * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. The first character cannot be a number. * * The value must be unique within a featurestore. * </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The entityTypeId. */ public java.lang.String getEntityTypeId() { java.lang.Object ref = entityTypeId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); entityTypeId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. * * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. The first character cannot be a number. * * The value must be unique within a featurestore. 
* </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for entityTypeId. */ public com.google.protobuf.ByteString getEntityTypeIdBytes() { java.lang.Object ref = entityTypeId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); entityTypeId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. * * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. The first character cannot be a number. * * The value must be unique within a featurestore. * </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The entityTypeId to set. * @return This builder for chaining. */ public Builder setEntityTypeId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } entityTypeId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. * * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. The first character cannot be a number. * * The value must be unique within a featurestore. * </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearEntityTypeId() { entityTypeId_ = getDefaultInstance().getEntityTypeId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the EntityType, which will become the final * component of the EntityType's resource name. * * This value may be up to 60 characters, and valid characters are * `[a-z0-9_]`. 
The first character cannot be a number. * * The value must be unique within a featurestore. * </pre> * * <code>string entity_type_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for entityTypeId to set. * @return This builder for chaining. */ public Builder setEntityTypeIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); entityTypeId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CreateEntityTypeRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CreateEntityTypeRequest) private static final com.google.cloud.aiplatform.v1.CreateEntityTypeRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.CreateEntityTypeRequest(); } public static com.google.cloud.aiplatform.v1.CreateEntityTypeRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateEntityTypeRequest> PARSER = new com.google.protobuf.AbstractParser<CreateEntityTypeRequest>() { @java.lang.Override public CreateEntityTypeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { 
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateEntityTypeRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateEntityTypeRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.CreateEntityTypeRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,858
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SetLabelsImageRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * A request message for Images.SetLabels. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.SetLabelsImageRequest} */ public final class SetLabelsImageRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.SetLabelsImageRequest) SetLabelsImageRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SetLabelsImageRequest.newBuilder() to construct. 
private SetLabelsImageRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SetLabelsImageRequest() { project_ = ""; resource_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SetLabelsImageRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_SetLabelsImageRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_SetLabelsImageRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.SetLabelsImageRequest.class, com.google.cloud.compute.v1.SetLabelsImageRequest.Builder.class); } private int bitField0_; public static final int GLOBAL_SET_LABELS_REQUEST_RESOURCE_FIELD_NUMBER = 319917189; private com.google.cloud.compute.v1.GlobalSetLabelsRequest globalSetLabelsRequestResource_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the globalSetLabelsRequestResource field is set. */ @java.lang.Override public boolean hasGlobalSetLabelsRequestResource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The globalSetLabelsRequestResource. */ @java.lang.Override public com.google.cloud.compute.v1.GlobalSetLabelsRequest getGlobalSetLabelsRequestResource() { return globalSetLabelsRequestResource_ == null ? 
com.google.cloud.compute.v1.GlobalSetLabelsRequest.getDefaultInstance() : globalSetLabelsRequestResource_; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.GlobalSetLabelsRequestOrBuilder getGlobalSetLabelsRequestResourceOrBuilder() { return globalSetLabelsRequestResource_ == null ? com.google.cloud.compute.v1.GlobalSetLabelsRequest.getDefaultInstance() : globalSetLabelsRequestResource_; } public static final int PROJECT_FIELD_NUMBER = 227560217; @SuppressWarnings("serial") private volatile java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The bytes for project. 
*/ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_FIELD_NUMBER = 195806222; @SuppressWarnings("serial") private volatile java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. */ @java.lang.Override public java.lang.String getResource() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(319917189, getGlobalSetLabelsRequestResource()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 319917189, getGlobalSetLabelsRequestResource()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.compute.v1.SetLabelsImageRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.SetLabelsImageRequest other = (com.google.cloud.compute.v1.SetLabelsImageRequest) obj; if (hasGlobalSetLabelsRequestResource() != other.hasGlobalSetLabelsRequestResource()) return false; if (hasGlobalSetLabelsRequestResource()) { if (!getGlobalSetLabelsRequestResource().equals(other.getGlobalSetLabelsRequestResource())) return false; } if (!getProject().equals(other.getProject())) return false; if (!getResource().equals(other.getResource())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasGlobalSetLabelsRequestResource()) { hash = (37 * hash) + GLOBAL_SET_LABELS_REQUEST_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getGlobalSetLabelsRequestResource().hashCode(); } hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, 
input); } public static com.google.cloud.compute.v1.SetLabelsImageRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.compute.v1.SetLabelsImageRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for Images.SetLabels. See the method description for details. 
* </pre> * * Protobuf type {@code google.cloud.compute.v1.SetLabelsImageRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.SetLabelsImageRequest) com.google.cloud.compute.v1.SetLabelsImageRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_SetLabelsImageRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_SetLabelsImageRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.SetLabelsImageRequest.class, com.google.cloud.compute.v1.SetLabelsImageRequest.Builder.class); } // Construct using com.google.cloud.compute.v1.SetLabelsImageRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getGlobalSetLabelsRequestResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; globalSetLabelsRequestResource_ = null; if (globalSetLabelsRequestResourceBuilder_ != null) { globalSetLabelsRequestResourceBuilder_.dispose(); globalSetLabelsRequestResourceBuilder_ = null; } project_ = ""; resource_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_SetLabelsImageRequest_descriptor; } @java.lang.Override public 
com.google.cloud.compute.v1.SetLabelsImageRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.SetLabelsImageRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.SetLabelsImageRequest build() { com.google.cloud.compute.v1.SetLabelsImageRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.SetLabelsImageRequest buildPartial() { com.google.cloud.compute.v1.SetLabelsImageRequest result = new com.google.cloud.compute.v1.SetLabelsImageRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.compute.v1.SetLabelsImageRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.globalSetLabelsRequestResource_ = globalSetLabelsRequestResourceBuilder_ == null ? globalSetLabelsRequestResource_ : globalSetLabelsRequestResourceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.project_ = project_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.resource_ = resource_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } 
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.SetLabelsImageRequest) { return mergeFrom((com.google.cloud.compute.v1.SetLabelsImageRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.SetLabelsImageRequest other) { if (other == com.google.cloud.compute.v1.SetLabelsImageRequest.getDefaultInstance()) return this; if (other.hasGlobalSetLabelsRequestResource()) { mergeGlobalSetLabelsRequestResource(other.getGlobalSetLabelsRequestResource()); } if (!other.getProject().isEmpty()) { project_ = other.project_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getResource().isEmpty()) { resource_ = other.resource_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 1566449778: { resource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 1566449778 case 1820481738: { project_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 1820481738 case -1735629782: { input.readMessage( getGlobalSetLabelsRequestResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case -1735629782 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { 
done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.compute.v1.GlobalSetLabelsRequest globalSetLabelsRequestResource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.GlobalSetLabelsRequest, com.google.cloud.compute.v1.GlobalSetLabelsRequest.Builder, com.google.cloud.compute.v1.GlobalSetLabelsRequestOrBuilder> globalSetLabelsRequestResourceBuilder_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the globalSetLabelsRequestResource field is set. */ public boolean hasGlobalSetLabelsRequestResource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The globalSetLabelsRequestResource. */ public com.google.cloud.compute.v1.GlobalSetLabelsRequest getGlobalSetLabelsRequestResource() { if (globalSetLabelsRequestResourceBuilder_ == null) { return globalSetLabelsRequestResource_ == null ? 
com.google.cloud.compute.v1.GlobalSetLabelsRequest.getDefaultInstance() : globalSetLabelsRequestResource_; } else { return globalSetLabelsRequestResourceBuilder_.getMessage(); } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setGlobalSetLabelsRequestResource( com.google.cloud.compute.v1.GlobalSetLabelsRequest value) { if (globalSetLabelsRequestResourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } globalSetLabelsRequestResource_ = value; } else { globalSetLabelsRequestResourceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setGlobalSetLabelsRequestResource( com.google.cloud.compute.v1.GlobalSetLabelsRequest.Builder builderForValue) { if (globalSetLabelsRequestResourceBuilder_ == null) { globalSetLabelsRequestResource_ = builderForValue.build(); } else { globalSetLabelsRequestResourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeGlobalSetLabelsRequestResource( com.google.cloud.compute.v1.GlobalSetLabelsRequest value) { if (globalSetLabelsRequestResourceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && globalSetLabelsRequestResource_ != null && globalSetLabelsRequestResource_ != com.google.cloud.compute.v1.GlobalSetLabelsRequest.getDefaultInstance()) { 
getGlobalSetLabelsRequestResourceBuilder().mergeFrom(value); } else { globalSetLabelsRequestResource_ = value; } } else { globalSetLabelsRequestResourceBuilder_.mergeFrom(value); } if (globalSetLabelsRequestResource_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearGlobalSetLabelsRequestResource() { bitField0_ = (bitField0_ & ~0x00000001); globalSetLabelsRequestResource_ = null; if (globalSetLabelsRequestResourceBuilder_ != null) { globalSetLabelsRequestResourceBuilder_.dispose(); globalSetLabelsRequestResourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.GlobalSetLabelsRequest.Builder getGlobalSetLabelsRequestResourceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getGlobalSetLabelsRequestResourceFieldBuilder().getBuilder(); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.GlobalSetLabelsRequestOrBuilder getGlobalSetLabelsRequestResourceOrBuilder() { if (globalSetLabelsRequestResourceBuilder_ != null) { return globalSetLabelsRequestResourceBuilder_.getMessageOrBuilder(); } else { return globalSetLabelsRequestResource_ == null ? 
com.google.cloud.compute.v1.GlobalSetLabelsRequest.getDefaultInstance() : globalSetLabelsRequestResource_; } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.GlobalSetLabelsRequest global_set_labels_request_resource = 319917189 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.GlobalSetLabelsRequest, com.google.cloud.compute.v1.GlobalSetLabelsRequest.Builder, com.google.cloud.compute.v1.GlobalSetLabelsRequestOrBuilder> getGlobalSetLabelsRequestResourceFieldBuilder() { if (globalSetLabelsRequestResourceBuilder_ == null) { globalSetLabelsRequestResourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.GlobalSetLabelsRequest, com.google.cloud.compute.v1.GlobalSetLabelsRequest.Builder, com.google.cloud.compute.v1.GlobalSetLabelsRequestOrBuilder>( getGlobalSetLabelsRequestResource(), getParentForChildren(), isClean()); globalSetLabelsRequestResource_ = null; } return globalSetLabelsRequestResourceBuilder_; } private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The project. */ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return The bytes for project. 
*/ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code> * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"]; * </code> * * @param value The bytes for project to set. * @return This builder for chaining. */ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. 
*/ public java.lang.String getResource() { java.lang.Object ref = resource_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. */ public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The resource to set. * @return This builder for chaining. */ public Builder setResource(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resource_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearResource() { resource_ = getDefaultInstance().getResource(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for resource to set. * @return This builder for chaining. 
*/ public Builder setResourceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resource_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.SetLabelsImageRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.SetLabelsImageRequest) private static final com.google.cloud.compute.v1.SetLabelsImageRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.SetLabelsImageRequest(); } public static com.google.cloud.compute.v1.SetLabelsImageRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SetLabelsImageRequest> PARSER = new com.google.protobuf.AbstractParser<SetLabelsImageRequest>() { @java.lang.Override public SetLabelsImageRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<SetLabelsImageRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SetLabelsImageRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.SetLabelsImageRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ignite
38,015
modules/core/src/main/java/org/apache/ignite/internal/client/thin/TcpClientChannel.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.client.thin; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Timer; import java.util.TimerTask; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Executor; import java.util.concurrent.ForkJoinPool; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.Consumer; import java.util.function.Function; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.client.ClientAuthenticationException; import org.apache.ignite.client.ClientAuthorizationException; import org.apache.ignite.client.ClientConnectionException; import 
org.apache.ignite.client.ClientException; import org.apache.ignite.client.ClientFeatureNotSupportedByServerException; import org.apache.ignite.client.ClientReconnectedException; import org.apache.ignite.client.events.ConnectionDescription; import org.apache.ignite.configuration.ClientConfiguration; import org.apache.ignite.internal.binary.BinaryReaderEx; import org.apache.ignite.internal.binary.BinaryUtils; import org.apache.ignite.internal.binary.BinaryWriterEx; import org.apache.ignite.internal.binary.streams.BinaryInputStream; import org.apache.ignite.internal.binary.streams.BinaryOutputStream; import org.apache.ignite.internal.binary.streams.BinaryStreams; import org.apache.ignite.internal.client.monitoring.EventListenerDemultiplexer; import org.apache.ignite.internal.client.thin.io.ClientConnection; import org.apache.ignite.internal.client.thin.io.ClientConnectionMultiplexer; import org.apache.ignite.internal.client.thin.io.ClientConnectionStateHandler; import org.apache.ignite.internal.client.thin.io.ClientMessageHandler; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.odbc.ClientConnectionNodeRecoveryException; import org.apache.ignite.internal.processors.odbc.ClientListenerNioListener; import org.apache.ignite.internal.processors.odbc.ClientListenerRequest; import org.apache.ignite.internal.processors.platform.client.ClientFlag; import org.apache.ignite.internal.processors.platform.client.ClientStatus; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.logger.NullLogger; import org.jetbrains.annotations.Nullable; import static 
org.apache.ignite.internal.client.thin.ProtocolBitmaskFeature.HEARTBEAT; import static org.apache.ignite.internal.client.thin.ProtocolBitmaskFeature.USER_ATTRIBUTES; import static org.apache.ignite.internal.client.thin.ProtocolVersion.LATEST_VER; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_0_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_1_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_2_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_3_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_4_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_5_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_6_0; import static org.apache.ignite.internal.client.thin.ProtocolVersion.V1_7_0; import static org.apache.ignite.internal.client.thin.ProtocolVersionFeature.AUTHORIZATION; import static org.apache.ignite.internal.client.thin.ProtocolVersionFeature.BITMAP_FEATURES; import static org.apache.ignite.internal.client.thin.ProtocolVersionFeature.PARTITION_AWARENESS; import static org.apache.ignite.internal.processors.platform.client.ClientStatus.SECURITY_VIOLATION; /** * Implements {@link ClientChannel} over TCP. */ class TcpClientChannel implements ClientChannel, ClientMessageHandler, ClientConnectionStateHandler { /** Protocol version used by default on first connection attempt. */ private static final ProtocolVersion DEFAULT_VERSION = LATEST_VER; /** Supported protocol versions. */ private static final Collection<ProtocolVersion> supportedVers = Arrays.asList( V1_7_0, V1_6_0, V1_5_0, V1_4_0, V1_3_0, V1_2_0, V1_1_0, V1_0_0 ); /** GridNioServer has minimum idle check interval of 2 seconds, even if idleTimeout is lower. */ private static final long MIN_RECOMMENDED_HEARTBEAT_INTERVAL = 500; /** Preallocated empty bytes. */ public static final byte[] EMPTY_BYTES = new byte[0]; /** Protocol context. 
*/ private volatile ProtocolContext protocolCtx; /** Server node ID. */ private volatile UUID srvNodeId; /** Server topology version. */ private volatile AffinityTopologyVersion srvTopVer; /** Channel. */ private volatile ClientConnection sock; /** Request id. */ private final AtomicLong reqId = new AtomicLong(1); /** Pending requests. */ private final Map<Long, ClientRequestFuture> pendingReqs = new ConcurrentHashMap<>(); /** Lock to safely close pending requests. */ private final ReadWriteLock pendingReqsLock = new ReentrantReadWriteLock(); /** Topology change listeners. */ private final Collection<Consumer<ClientChannel>> topChangeLsnrs = new CopyOnWriteArrayList<>(); /** Notification listeners. */ @SuppressWarnings("unchecked") private final Map<Long, NotificationListener>[] notificationLsnrs = new Map[ClientNotificationType.values().length]; /** Pending notification. */ @SuppressWarnings("unchecked") private final Map<Long, Queue<T2<ByteBuffer, Exception>>>[] pendingNotifications = new Map[ClientNotificationType.values().length]; /** Guard for notification listeners. */ private final ReadWriteLock notificationLsnrsGuard = new ReentrantReadWriteLock(); /** Closed flag. */ private final AtomicBoolean closed = new AtomicBoolean(); /** Executor for async operation listeners. */ private final Executor asyncContinuationExecutor; /** Send/receive timeout in milliseconds. */ private final int timeout; /** Heartbeat timer. */ private final Timer heartbeatTimer; /** Log. */ private final IgniteLogger log; /** */ private final EventListenerDemultiplexer eventListener; /** */ private final ConnectionDescription connDesc; /** Last send operation timestamp. */ private volatile long lastSendMillis; /** Constructor. 
*/ TcpClientChannel(ClientChannelConfiguration cfg, ClientConnectionMultiplexer connMgr) throws ClientConnectionException, ClientAuthenticationException, ClientProtocolError { validateConfiguration(cfg); log = NullLogger.whenNull(cfg.getLogger()); eventListener = cfg.eventListener(); for (ClientNotificationType type : ClientNotificationType.values()) { if (type.keepNotificationsWithoutListener()) pendingNotifications[type.ordinal()] = new ConcurrentHashMap<>(); } Executor cfgExec = cfg.getAsyncContinuationExecutor(); asyncContinuationExecutor = cfgExec != null ? cfgExec : ForkJoinPool.commonPool(); timeout = cfg.getTimeout(); List<InetSocketAddress> addrs = cfg.getAddresses(); ClientConnectionException connectionEx = null; assert !addrs.isEmpty(); for (InetSocketAddress addr : addrs) { try { sock = connMgr.open(addr, this, this); if (log.isDebugEnabled()) log.debug("Connection established: " + addr); } catch (ClientConnectionException e) { log.info("Can't establish connection with " + addr); connectionEx = U.addSuppressed(connectionEx, e); continue; } try { handshake(DEFAULT_VERSION, cfg.getUserName(), cfg.getUserPassword(), cfg.getUserAttributes()); } catch (ClientConnectionException e) { if (!X.hasCause(e, ClientConnectionNodeRecoveryException.class)) throw e; log.info("Can't establish connection with " + addr + ". Node in recovery mode."); connectionEx = U.addSuppressed(connectionEx, e); U.closeQuiet(sock); sock = null; continue; } break; } if (sock == null) { assert connectionEx != null; throw connectionEx; } assert protocolCtx != null : "Protocol context after handshake is null"; connDesc = new ConnectionDescription(sock.localAddress(), sock.remoteAddress(), protocolCtx.toString(), srvNodeId); heartbeatTimer = protocolCtx.isFeatureSupported(HEARTBEAT) && cfg.getHeartbeatEnabled() ? 
initHeartbeat(cfg.getHeartbeatInterval()) : null; } /** {@inheritDoc} */ @Override public void close() { close(null); } /** {@inheritDoc} */ @Override public void onMessage(ByteBuffer buf) { processNextMessage(buf); } /** {@inheritDoc} */ @Override public void onDisconnected(@Nullable Exception e) { if (e == null) log.info("Client disconnected"); else log.warning("Client disconnected: " + e.getMessage(), e); close(e); } /** * Close the channel with cause. */ private void close(Exception cause) { if (closed.compareAndSet(false, true)) { // Skip notification if handshake is not successful or completed. ConnectionDescription connDesc0 = connDesc; if (connDesc0 != null) eventListener.onConnectionClosed(connDesc0, cause); if (heartbeatTimer != null) heartbeatTimer.cancel(); U.closeQuiet(sock); pendingReqsLock.writeLock().lock(); try { for (ClientRequestFuture pendingReq : pendingReqs.values()) pendingReq.onDone(new ClientConnectionException("Channel is closed [remoteAddress=" + sock.remoteAddress() + ']', cause)); } finally { pendingReqsLock.writeLock().unlock(); } notificationLsnrsGuard.readLock().lock(); try { for (Map<Long, NotificationListener> lsnrs : notificationLsnrs) { if (lsnrs != null) lsnrs.values().forEach(lsnr -> lsnr.onChannelClosed(cause)); } } finally { notificationLsnrsGuard.readLock().unlock(); } } } /** {@inheritDoc} */ @Override public <T> T service( ClientOperation op, Consumer<PayloadOutputChannel> payloadWriter, Function<PayloadInputChannel, T> payloadReader ) throws ClientException { ClientRequestFuture fut = send(op, payloadWriter); return receive(fut, payloadReader); } /** {@inheritDoc} */ @Override public <T> CompletableFuture<T> serviceAsync( ClientOperation op, Consumer<PayloadOutputChannel> payloadWriter, Function<PayloadInputChannel, T> payloadReader ) { try { ClientRequestFuture fut = send(op, payloadWriter); return receiveAsync(fut, payloadReader); } catch (Throwable t) { CompletableFuture<T> fut = new CompletableFuture<>(); 
fut.completeExceptionally(t); return fut; } } /** * @param op Operation. * @param payloadWriter Payload writer to stream or {@code null} if request has no payload. * @return Request future. */ private ClientRequestFuture send(ClientOperation op, Consumer<PayloadOutputChannel> payloadWriter) throws ClientException { long id = reqId.getAndIncrement(); long startTimeNanos = System.nanoTime(); PayloadOutputChannel payloadCh = new PayloadOutputChannel(this); try { ClientRequestFuture fut; pendingReqsLock.readLock().lock(); try { if (closed()) { ClientConnectionException err = new ClientConnectionException("Channel is closed [remoteAddress=" + sock.remoteAddress() + ']'); eventListener.onRequestFail(connDesc, id, op.code(), op.name(), System.nanoTime() - startTimeNanos, err); throw err; } fut = new ClientRequestFuture(id, op, startTimeNanos); pendingReqs.put(id, fut); } finally { pendingReqsLock.readLock().unlock(); } eventListener.onRequestStart(connDesc, id, op.code(), op.name()); BinaryOutputStream req = payloadCh.out(); req.writeInt(0); // Reserve an integer for the request size. req.writeShort(op.code()); req.writeLong(id); if (payloadWriter != null) payloadWriter.accept(payloadCh); req.writeInt(0, req.position() - 4); // Actual size. write(req.array(), req.position(), payloadCh::close); return fut; } catch (Throwable t) { pendingReqs.remove(id); // Potential double-close is handled in PayloadOutputChannel. payloadCh.close(); eventListener.onRequestFail(connDesc, id, op.code(), op.name(), System.nanoTime() - startTimeNanos, t); throw t; } } /** * @param pendingReq Request future. * @param payloadReader Payload reader from stream. * @return Received operation payload or {@code null} if response has no payload. 
*/ private <T> T receive(ClientRequestFuture pendingReq, Function<PayloadInputChannel, T> payloadReader) throws ClientException { long reqId = pendingReq.requestId; ClientOperation op = pendingReq.operation; long startTimeNanos = pendingReq.startTimeNanos; try { ByteBuffer payload = timeout > 0 ? pendingReq.get(timeout) : pendingReq.get(); T res = null; if (payload != null && payloadReader != null) res = payloadReader.apply(new PayloadInputChannel(this, payload)); eventListener.onRequestSuccess(connDesc, reqId, op.code(), op.name(), System.nanoTime() - startTimeNanos); return res; } catch (IgniteCheckedException e) { log.warning("Failed to process response: " + e.getMessage(), e); RuntimeException err = convertException(e); eventListener.onRequestFail(connDesc, reqId, op.code(), op.name(), System.nanoTime() - startTimeNanos, err); throw err; } } /** * Receives the response asynchronously. * * @param pendingReq Request future. * @param payloadReader Payload reader from stream. * @return Future for the operation. 
*/ private <T> CompletableFuture<T> receiveAsync(ClientRequestFuture pendingReq, Function<PayloadInputChannel, T> payloadReader) { CompletableFuture<T> fut = new CompletableFuture<>(); long reqId = pendingReq.requestId; ClientOperation op = pendingReq.operation; long startTimeNanos = pendingReq.startTimeNanos; pendingReq.listen(() -> asyncContinuationExecutor.execute(() -> { try { ByteBuffer payload = pendingReq.get(); T res = null; if (payload != null && payloadReader != null) res = payloadReader.apply(new PayloadInputChannel(this, payload)); eventListener.onRequestSuccess(connDesc, reqId, op.code(), op.name(), System.nanoTime() - startTimeNanos); fut.complete(res); } catch (Throwable t) { log.warning("Failed to process response: " + t.getMessage(), t); RuntimeException err = convertException(t); eventListener.onRequestFail(connDesc, reqId, op.code(), op.name(), System.nanoTime() - startTimeNanos, err); fut.completeExceptionally(err); } })); return fut; } /** * Converts exception to {@link org.apache.ignite.internal.processors.platform.client.IgniteClientException}. * @param e Exception to convert. * @return Resulting exception. */ private RuntimeException convertException(Throwable e) { if (e.getCause() instanceof ClientError) return new ClientException(e.getMessage(), e.getCause()); // For every known class derived from ClientException, wrap cause in a new instance. // We could rethrow e.getCause() when instanceof ClientException, // but this results in an incomplete stack trace from the receiver thread. // This is similar to IgniteUtils.exceptionConverters. 
if (e.getCause() instanceof ClientConnectionException) return new ClientConnectionException(e.getMessage(), e.getCause()); if (e.getCause() instanceof ClientReconnectedException) return new ClientReconnectedException(e.getMessage(), e.getCause()); if (e.getCause() instanceof ClientAuthenticationException) return new ClientAuthenticationException(e.getMessage(), e.getCause()); if (e.getCause() instanceof ClientAuthorizationException) return new ClientAuthorizationException(e.getMessage(), e.getCause()); if (e.getCause() instanceof ClientFeatureNotSupportedByServerException) return new ClientFeatureNotSupportedByServerException(e.getMessage(), e.getCause()); if (e.getCause() instanceof ClientException) return new ClientException(e.getMessage(), e.getCause()); return new ClientException(e.getMessage(), e); } /** * Process next message from the input stream and complete corresponding future. */ private void processNextMessage(ByteBuffer buf) throws ClientProtocolError, ClientConnectionException { BinaryInputStream dataInput = BinaryStreams.inputStream(buf); if (protocolCtx == null) { // Process handshake. 
pendingReqs.remove(-1L).onDone(buf); return; } Long resId = dataInput.readLong(); int status = 0; ClientOperation notificationOp = null; if (protocolCtx.isFeatureSupported(PARTITION_AWARENESS)) { short flags = dataInput.readShort(); if ((flags & ClientFlag.AFFINITY_TOPOLOGY_CHANGED) != 0) { long topVer = dataInput.readLong(); int minorTopVer = dataInput.readInt(); srvTopVer = new AffinityTopologyVersion(topVer, minorTopVer); for (Consumer<ClientChannel> lsnr : topChangeLsnrs) lsnr.accept(this); } if ((flags & ClientFlag.NOTIFICATION) != 0) { short notificationCode = dataInput.readShort(); notificationOp = ClientOperation.fromCode(notificationCode); if (notificationOp == null || notificationOp.notificationType() == null) throw new ClientProtocolError(String.format("Unexpected notification code [%d]", notificationCode)); } if ((flags & ClientFlag.ERROR) != 0) status = dataInput.readInt(); } else status = dataInput.readInt(); int hdrSize = dataInput.position(); int msgSize = buf.limit(); ByteBuffer res; Exception err; if (status == 0) { err = null; res = msgSize > hdrSize ? buf : null; } else { String errMsg = ClientUtils.createBinaryReader(null, dataInput).readString(); err = status == SECURITY_VIOLATION ? new ClientAuthorizationException(errMsg) : new ClientServerError(errMsg, status, resId); res = null; } if (notificationOp == null) { // Respone received. ClientRequestFuture pendingReq = pendingReqs.remove(resId); if (pendingReq == null) throw new ClientProtocolError(String.format("Unexpected response ID [%s]", resId)); pendingReq.onDone(res, err); } else { // Notification received. 
ClientNotificationType notificationType = notificationOp.notificationType(); asyncContinuationExecutor.execute(() -> { NotificationListener lsnr = null; notificationLsnrsGuard.readLock().lock(); try { Map<Long, NotificationListener> lsrns = notificationLsnrs[notificationType.ordinal()]; if (lsrns != null) lsnr = lsrns.get(resId); if (notificationType.keepNotificationsWithoutListener() && lsnr == null) { pendingNotifications[notificationType.ordinal()].computeIfAbsent(resId, k -> new ConcurrentLinkedQueue<>()).add(new T2<>(res, err)); } } finally { notificationLsnrsGuard.readLock().unlock(); } if (lsnr != null) lsnr.acceptNotification(res, err); }); } } /** {@inheritDoc} */ @Override public ProtocolContext protocolCtx() { return protocolCtx; } /** {@inheritDoc} */ @Override public UUID serverNodeId() { return srvNodeId; } /** {@inheritDoc} */ @Override public AffinityTopologyVersion serverTopologyVersion() { return srvTopVer; } /** {@inheritDoc} */ @Override public void addTopologyChangeListener(Consumer<ClientChannel> lsnr) { topChangeLsnrs.add(lsnr); } /** {@inheritDoc} */ @Override public void addNotificationListener(ClientNotificationType type, Long rsrcId, NotificationListener lsnr) { Queue<T2<ByteBuffer, Exception>> pendingQueue = null; notificationLsnrsGuard.writeLock().lock(); try { if (closed()) throw new ClientConnectionException("Channel is closed [remoteAddress=" + sock.remoteAddress() + ']'); Map<Long, NotificationListener> lsnrs = notificationLsnrs[type.ordinal()]; if (lsnrs == null) notificationLsnrs[type.ordinal()] = lsnrs = new ConcurrentHashMap<>(); lsnrs.put(rsrcId, lsnr); if (type.keepNotificationsWithoutListener()) pendingQueue = pendingNotifications[type.ordinal()].remove(rsrcId); } finally { notificationLsnrsGuard.writeLock().unlock(); } // Drain pending notifications queue. 
if (pendingQueue != null) pendingQueue.forEach(n -> lsnr.acceptNotification(n.get1(), n.get2())); } /** {@inheritDoc} */ @Override public void removeNotificationListener(ClientNotificationType type, Long rsrcId) { notificationLsnrsGuard.writeLock().lock(); try { Map<Long, NotificationListener> lsnrs = notificationLsnrs[type.ordinal()]; if (lsnrs == null) return; lsnrs.remove(rsrcId); if (type.keepNotificationsWithoutListener()) pendingNotifications[type.ordinal()].remove(rsrcId); } finally { notificationLsnrsGuard.writeLock().unlock(); } } /** {@inheritDoc} */ @Override public boolean closed() { return closed.get(); } /** Validate {@link ClientConfiguration}. */ private static void validateConfiguration(ClientChannelConfiguration cfg) { String error = null; List<InetSocketAddress> addrs = cfg.getAddresses(); if (F.isEmpty(addrs)) error = "At least one Ignite server node must be specified in the Ignite client configuration"; if (error == null && cfg.getHeartbeatInterval() <= 0) error = "heartbeatInterval cannot be zero or less."; if (error != null) throw new IllegalArgumentException(error); } /** Client handshake. */ private void handshake(ProtocolVersion ver, String user, String pwd, Map<String, String> userAttrs) throws ClientConnectionException, ClientAuthenticationException, ClientProtocolError { long reqId = -1L; long startTime = System.nanoTime(); eventListener.onHandshakeStart(new ConnectionDescription(sock.localAddress(), sock.remoteAddress(), new ProtocolContext(ver).toString(), null)); while (true) { ClientRequestFuture fut; pendingReqsLock.readLock().lock(); try { if (closed()) throw new ClientConnectionException("Channel is closed [remoteAddress=" + sock.remoteAddress() + ']'); fut = new ClientRequestFuture(reqId, ClientOperation.HANDSHAKE); pendingReqs.put(reqId, fut); } finally { pendingReqsLock.readLock().unlock(); } handshakeReq(ver, user, pwd, userAttrs); try { ByteBuffer buf = timeout > 0 ? 
fut.get(timeout) : fut.get(); BinaryInputStream res = BinaryStreams.inputStream(buf); try (BinaryReaderEx reader = ClientUtils.createBinaryReader(null, res)) { boolean success = res.readBoolean(); if (success) { byte[] features = EMPTY_BYTES; if (ProtocolContext.isFeatureSupported(ver, BITMAP_FEATURES)) features = reader.readByteArray(); protocolCtx = new ProtocolContext(ver, ProtocolBitmaskFeature.enumSet(features)); if (protocolCtx.isFeatureSupported(PARTITION_AWARENESS)) { // Reading server UUID srvNodeId = reader.readUuid(); } if (log.isDebugEnabled()) log.debug("Handshake succeeded [protocolVersion=" + protocolCtx.version() + ", srvNodeId=" + srvNodeId + ']'); eventListener.onHandshakeSuccess( new ConnectionDescription(sock.localAddress(), sock.remoteAddress(), protocolCtx.toString(), srvNodeId), System.nanoTime() - startTime ); break; } else { ProtocolVersion srvVer = new ProtocolVersion(res.readShort(), res.readShort(), res.readShort()); String err = reader.readString(); int errCode = ClientStatus.FAILED; if (res.remaining() > 0) errCode = reader.readInt(); if (log.isDebugEnabled()) log.debug("Handshake failed [protocolVersion=" + srvVer + ", err=" + err + ", errCode=" + errCode + ']'); RuntimeException resultErr = null; if (errCode == ClientStatus.AUTH_FAILED) resultErr = new ClientAuthenticationException(err); else if (errCode == ClientStatus.NODE_IN_RECOVERY_MODE) throw new ClientConnectionNodeRecoveryException(err); else if (ver.equals(srvVer)) resultErr = new ClientProtocolError(err); else if (!supportedVers.contains(srvVer) || (!ProtocolContext.isFeatureSupported(srvVer, AUTHORIZATION) && !F.isEmpty(user))) { // Server version is not supported by this client OR server version is less than 1.1.0 supporting // authentication and authentication is required. resultErr = new ClientProtocolError(String.format( "Protocol version mismatch: client %s / server %s. 
Server details: %s", ver, srvVer, err )); } if (resultErr != null) { ConnectionDescription connDesc = new ConnectionDescription(sock.localAddress(), sock.remoteAddress(), new ProtocolContext(ver).toString(), null); long elapsedNanos = System.nanoTime() - startTime; eventListener.onHandshakeFail(connDesc, elapsedNanos, resultErr); throw resultErr; } else { // Retry with server version. if (log.isDebugEnabled()) log.debug("Retrying handshake with server version [protocolVersion=" + srvVer + ']'); ver = srvVer; } } } } catch (IOException | IgniteCheckedException e) { ClientException err; if (e instanceof IOException) err = handleIOError((IOException)e); else err = new ClientConnectionException(e.getMessage() + " [remoteAddress=" + sock.remoteAddress() + ']', e); eventListener.onHandshakeFail( new ConnectionDescription(sock.localAddress(), sock.remoteAddress(), new ProtocolContext(ver).toString(), null), System.nanoTime() - startTime, err ); throw err; } } } /** Send handshake request. */ private void handshakeReq(ProtocolVersion proposedVer, String user, String pwd, Map<String, String> userAttrs) throws ClientConnectionException { try (BinaryWriterEx writer = BinaryUtils.writer(U.binaryContext(null), BinaryStreams.outputStream(32), null)) { ProtocolContext protocolCtx = protocolContextFromVersion(proposedVer); writer.writeInt(0); // reserve an integer for the request size writer.writeByte((byte)ClientListenerRequest.HANDSHAKE); writer.writeShort(proposedVer.major()); writer.writeShort(proposedVer.minor()); writer.writeShort(proposedVer.patch()); writer.writeByte(ClientListenerNioListener.THIN_CLIENT); if (protocolCtx.isFeatureSupported(BITMAP_FEATURES)) { byte[] features = ProtocolBitmaskFeature.featuresAsBytes(protocolCtx.features()); writer.writeByteArray(features); } if (protocolCtx.isFeatureSupported(USER_ATTRIBUTES)) writer.writeMap(userAttrs); boolean authSupported = protocolCtx.isFeatureSupported(AUTHORIZATION); if (authSupported && user != null && 
!user.isEmpty()) { writer.writeString(user); writer.writeString(pwd); } writer.out().writeInt(0, writer.out().position() - 4); // actual size write(writer.out().arrayCopy(), writer.out().position(), null); } } /** * @param ver Protocol version. * @return Protocol context for a version. */ private ProtocolContext protocolContextFromVersion(ProtocolVersion ver) { EnumSet<ProtocolBitmaskFeature> features = null; if (ProtocolContext.isFeatureSupported(ver, BITMAP_FEATURES)) features = ProtocolBitmaskFeature.allFeaturesAsEnumSet(); return new ProtocolContext(ver, features); } /** Write bytes to the output stream. */ private void write(byte[] bytes, int len, @Nullable Runnable onDone) throws ClientConnectionException { ByteBuffer buf = ByteBuffer.wrap(bytes, 0, len); try { sock.send(buf, onDone); lastSendMillis = System.currentTimeMillis(); } catch (IgniteCheckedException e) { throw new ClientConnectionException(e.getMessage() + " [remoteAddress=" + sock.remoteAddress() + ']', e); } } /** * @param ex IO exception (cause). */ private ClientException handleIOError(@Nullable IOException ex) { return handleIOError(S.toString(ConnectionDescription.class, connDesc), ex); } /** * @param chInfo Additional channel info * @param ex IO exception (cause). */ private ClientException handleIOError(String chInfo, @Nullable IOException ex) { return new ClientConnectionException("Ignite cluster is unavailable [" + chInfo + ']', ex); } /** * Initializes heartbeats. * * @param configuredInterval Configured heartbeat interval, in milliseconds. * @return Heartbeat timer. */ private Timer initHeartbeat(long configuredInterval) { long heartbeatInterval = getHeartbeatInterval(configuredInterval); Timer timer = new Timer("tcp-client-channel-heartbeats-" + hashCode()); timer.schedule(new HeartbeatTask(heartbeatInterval), heartbeatInterval, heartbeatInterval); return timer; } /** * Gets the heartbeat interval based on the configured value and served-side idle timeout. 
* * @param configuredInterval Configured interval. * @return Resolved interval. */ private long getHeartbeatInterval(long configuredInterval) { long serverIdleTimeoutMs = service(ClientOperation.GET_IDLE_TIMEOUT, null, in -> in.in().readLong()); if (serverIdleTimeoutMs <= 0) { if (log.isInfoEnabled()) log.info("Server-side IdleTimeout is not set, using configured ClientConfiguration.heartbeatInterval: " + configuredInterval); return configuredInterval; } long recommendedHeartbeatInterval = serverIdleTimeoutMs / 3; if (recommendedHeartbeatInterval < MIN_RECOMMENDED_HEARTBEAT_INTERVAL) recommendedHeartbeatInterval = MIN_RECOMMENDED_HEARTBEAT_INTERVAL; long res = Math.min(configuredInterval, recommendedHeartbeatInterval); if (log.isInfoEnabled()) log.info("Using heartbeat interval: " + res + " (configured: " + configuredInterval + ", recommended: " + recommendedHeartbeatInterval + ", server-side IdleTimeout: " + serverIdleTimeoutMs + ")"); return res; } /** {@inheritDoc} */ @Override public String toString() { return "TcpClientChannel [srvNodeId=" + srvNodeId + ", addr=" + sock.remoteAddress() + ']'; } /** */ private static class ClientRequestFuture extends GridFutureAdapter<ByteBuffer> { /** */ final long startTimeNanos; /** */ final long requestId; /** */ final ClientOperation operation; /** */ ClientRequestFuture(long requestId, ClientOperation op) { this(requestId, op, System.nanoTime()); } /** */ ClientRequestFuture(long requestId, ClientOperation op, long startTimeNanos) { this.requestId = requestId; operation = op; this.startTimeNanos = startTimeNanos; } } /** * Sends heartbeat messages. */ private class HeartbeatTask extends TimerTask { /** Heartbeat interval. */ private final long interval; /** Constructor. 
*/ public HeartbeatTask(long interval) { this.interval = interval; } /** {@inheritDoc} */ @Override public void run() { try { if (System.currentTimeMillis() - lastSendMillis > interval) service(ClientOperation.HEARTBEAT, null, null); } catch (Throwable ignored) { // Ignore failed heartbeats. } } } }
apache/hive
37,997
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.metastore;

import java.lang.reflect.Method;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.ZKDeRegisterWatcher;
import org.apache.hadoop.hive.common.ZooKeeperHiveHelper;
import org.apache.hadoop.hive.metastore.ServletSecurity.AuthType;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.apache.hadoop.hive.metastore.leader.HouseKeepingTasks;
import org.apache.hadoop.hive.metastore.leader.CMClearer;
import org.apache.hadoop.hive.metastore.leader.CompactorPMF;
import org.apache.hadoop.hive.metastore.leader.LeaderElectionContext;
import org.apache.hadoop.hive.metastore.leader.CompactorTasks;
import org.apache.hadoop.hive.metastore.leader.StatsUpdaterTask;
import org.apache.hadoop.hive.metastore.metrics.JvmPauseMonitor;
import org.apache.hadoop.hive.metastore.metrics.Metrics;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
import org.apache.hadoop.hive.metastore.utils.LogUtils;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.utils.MetastoreVersionInfo;
import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.server.ServerContext;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TServerEventHandler;
import org.apache.thrift.server.TServlet;
import org.apache.thrift.server.TThreadPoolServer;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.server.HttpChannel;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.security.Constraint;
import org.eclipse.jetty.util.thread.ExecutorThreadPool;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.servlet.Servlet;
import javax.servlet.ServletRequestEvent;
import javax.servlet.ServletRequestListener;

/**
 * Standalone (remote) Hive Metastore server entry point.
 *
 * <p>Hosts the Thrift metastore service over either a binary socket transport or HTTP
 * (selected by {@code ConfVars.THRIFT_TRANSPORT_MODE}), plus optional embedded servlets
 * (property servlet, catalog servlet) on a separate Jetty server. Also wires up metrics,
 * shutdown hooks, ZooKeeper-based service discovery and leader-elected background tasks.
 *
 * TODO:pc remove application logic to a separate interface.
 */
public class HiveMetaStore extends ThriftHiveMetastore {
  public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStore.class);

  // boolean that tells if the HiveMetaStore (remote) server is being used.
  // Can be used to determine if the calls to metastore api (HMSHandler) are being made with
  // embedded metastore or a remote one
  private static boolean isMetaStoreRemote = false;

  private static ShutdownHookManager shutdownHookMgr;

  /** MM write states. */
  public static final char MM_WRITE_OPEN = 'o', MM_WRITE_COMMITTED = 'c', MM_WRITE_ABORTED = 'a';

  static HadoopThriftAuthBridge.Server saslServer;
  private static MetastoreDelegationTokenManager delegationTokenManager;
  static boolean useSasl;

  private static ZooKeeperHiveHelper zooKeeperHelper = null;
  private static String msHost = null;
  private static ThriftServer thriftServer;
  /** the servlet server. */
  private static Server servletServer = null;
  /** the port and path of the property servlet. */
  private static int propertyServletPort = -1;

  /**
   * Gets the embedded servlet server.
   * @return the server instance or null
   */
  public static Server getServletServer() {
    return servletServer;
  }

  /**
   * Gets the property servlet connector port.
   * <p>If configuration is 0, this port is allocated by the system.</p>
   * @return the connector port or -1 if not configured
   */
  public static int getPropertyServletPort() {
    return propertyServletPort;
  }

  /**
   * Checks whether renaming is allowed between the two databases.
   *
   * <p>Renames across different databases are rejected when either side is a source of
   * replication; same-database renames are always allowed.</p>
   *
   * @param srcDB the source database
   * @param destDB the destination database
   * @return false only when the databases differ and either one is a replication source
   */
  public static boolean isRenameAllowed(Database srcDB, Database destDB) {
    if (!srcDB.getName().equalsIgnoreCase(destDB.getName())) {
      if (ReplChangeManager.isSourceOfReplication(srcDB)
          || ReplChangeManager.isSourceOfReplication(destDB)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Create HMS handler for embedded metastore.
   *
   * <h1>IMPORTANT</h1>
   *
   * This method is called indirectly by HiveMetastoreClient using reflection.
   * It can not be removed and its arguments can't be changed without matching
   * change in HiveMetastoreClient.
   *
   * @param conf configuration to use
   * @throws MetaException
   */
  static Iface newHMSHandler(Configuration conf) throws MetaException {
    HMSHandler baseHandler = new HMSHandler("hive client", conf);
    return HMSHandlerProxyFactory.getProxy(conf, baseHandler, true);
  }

  /**
   * Discard a current delegation token.
   *
   * @param tokenStrForm
   *          the token in string form
   */
  public static void cancelDelegationToken(String tokenStrForm) throws IOException {
    delegationTokenManager.cancelDelegationToken(tokenStrForm);
  }

  /**
   * Get a new delegation token.
   *
   * @param owner the token owner
   * @param renewer
   *          the designated renewer
   * @param remoteAddr the client address the request originated from
   */
  public static String getDelegationToken(String owner, String renewer, String remoteAddr)
      throws IOException, InterruptedException {
    return delegationTokenManager.getDelegationToken(owner, renewer, remoteAddr);
  }

  /**
   * @return true if remote metastore has been created
   */
  public static boolean isMetaStoreRemote() {
    return isMetaStoreRemote;
  }

  /**
   * Renew a delegation token to extend its lifetime.
   *
   * @param tokenStrForm
   *          the token in string form
   */
  public static long renewDelegationToken(String tokenStrForm) throws IOException {
    return delegationTokenManager.renewDelegationToken(tokenStrForm);
  }

  /* Interface to encapsulate Http and binary thrift server for HiveMetastore */
  private interface ThriftServer {
    // Starts serving; blocks until the server terminates.
    void start() throws Throwable;
    // True once the underlying server is actually serving requests.
    boolean isRunning();
    // The handler backing this server (used by leader-election tasks).
    IHMSHandler getHandler();
  }

  /**
   * Metastore server process entry point: parses CLI args, initializes logging and
   * metrics, registers the shutdown hook, then starts the Thrift metastore.
   *
   * @param args command-line arguments
   */
  public static void main(String[] args) throws Throwable {
    final Configuration conf = MetastoreConf.newMetastoreConf();
    shutdownHookMgr = ShutdownHookManager.get();
    HiveMetastoreCli cli = new HiveMetastoreCli(conf);
    cli.parse(args);
    final boolean isCliVerbose = cli.isVerbose();
    // NOTE: It is critical to do this prior to initializing log4j, otherwise
    // any log specific settings via hiveconf will be ignored
    Properties hiveconf = cli.addHiveconfToSystemProperties();

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    try {
      // If the log4j.configuration property hasn't already been explicitly set,
      // use Hive's default log4j configuration
      if (System.getProperty("log4j.configurationFile") == null) {
        LogUtils.initHiveLog4j(conf);
      } else {
        // reconfigure log4j after settings via hiveconf are write into System Properties
        LoggerContext context = (LoggerContext)LogManager.getContext(false);
        context.reconfigure();
      }
    } catch (LogUtils.LogInitializationException e) {
      LOG.warn(e.getMessage());
    }
    startupShutdownMessage(HiveMetaStore.class, args, LOG);

    try {
      String msg = "Starting hive metastore on port %d. PID is %d"
          .formatted(cli.getPort(), ProcessHandle.current().pid());
      LOG.info(msg);
      if (isCliVerbose) {
        System.err.println(msg);
      }

      // set all properties specified on the command line
      for (Map.Entry<Object, Object> item : hiveconf.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
      }
      // for metastore process, all metastore call should be embedded metastore call.
      conf.set(ConfVars.THRIFT_URIS.getHiveName(), "");
      conf.set(ConfVars.THRIFT_URIS.getVarname(), "");

      // Add shutdown hook.
      shutdownHookMgr.addShutdownHook(() -> {
        String shutdownMsg = "Shutting down hive metastore at " + getHostname();
        LOG.info(shutdownMsg);
        if (isCliVerbose) {
          System.err.println(shutdownMsg);
        }
        // servlet server
        if (servletServer != null) {
          try {
            servletServer.stop();
          } catch (Exception e) {
            LOG.error("Error stopping Property Map server.", e);
          }
        }
        // metrics
        if (MetastoreConf.getBoolVar(conf, ConfVars.METRICS_ENABLED)) {
          try {
            Metrics.shutdown();
          } catch (Exception e) {
            LOG.error("error in Metrics deinit: " + e.getClass().getName() + " "
                + e.getMessage(), e);
          }
        }
        // Remove from zookeeper if it's configured
        try {
          if (MetastoreConf.getVar(conf, ConfVars.THRIFT_SERVICE_DISCOVERY_MODE)
              .equalsIgnoreCase("zookeeper")) {
            zooKeeperHelper.removeServerInstanceFromZooKeeper();
          }
        } catch (Exception e) {
          LOG.error("Error removing znode for this metastore instance from ZooKeeper.", e);
        }
        ThreadPool.shutdown();
      }, 10);

      // Start Metrics for Standalone (Remote) Mode
      if (MetastoreConf.getBoolVar(conf, ConfVars.METRICS_ENABLED)) {
        try {
          Metrics.initialize(conf);
        } catch (Exception e) {
          // log exception, but ignore inability to start
          LOG.error("error in Metrics init: " + e.getClass().getName() + " "
              + e.getMessage(), e);
        }
      }

      startMetaStore(cli.getPort(), HadoopThriftAuthBridge.getBridge(), conf, true, null);
    } catch (Throwable t) {
      // Catch the exception, log it and rethrow it.
      LOG.error("Metastore Thrift Server threw an exception...", t);
      throw t;
    }
  }

  /**
   * Start the metastore store.
   * @param port
   * @param bridge
   * @param conf
   * @throws Throwable
   */
  public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
      Configuration conf) throws Throwable {
    startMetaStore(port, bridge, conf, false, null);
  }

  // TODO: Is it worth trying to use a server that supports HTTP/2?
  // Does the Thrift http client support this?
  /**
   * Builds (but does not start) the HTTP-transport metastore: a Jetty server fronting a
   * Thrift {@link TServlet}, with optional SSL and JWT authentication.
   *
   * @param port the HTTP connector port
   * @param conf configuration
   * @return a {@link ThriftServer} wrapper whose {@code start()} serves until shutdown
   */
  private static ThriftServer startHttpMetastore(int port, Configuration conf)
      throws Exception {
    LOG.info("Attempting to start http metastore server on port: {}", port);
    // login principal if security is enabled
    ServletSecurity.loginServerPrincipal(conf);
    // NOTE(review): maxMessageSize is read here but never applied to the HTTP server —
    // confirm whether a request-size limit should be wired in (the binary path uses it).
    long maxMessageSize = MetastoreConf.getLongVar(conf, ConfVars.SERVER_MAX_MESSAGE_SIZE);
    int minWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MIN_THREADS);
    int maxWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MAX_THREADS);
    // Server thread pool
    // Start with minWorkerThreads, expand till maxWorkerThreads and reject
    // subsequent requests
    // TODO: Add a config for keepAlive time of threads ?
    ExecutorService executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
        60L, TimeUnit.SECONDS, new SynchronousQueue<>(), r -> {
          Thread thread = new Thread(r);
          thread.setDaemon(true);
          thread.setName("Metastore-HttpHandler-Pool: Thread-" + thread.getId());
          return thread;
        }) {
      @Override
      public void setThreadFactory(ThreadFactory threadFactory) {
        // Avoid ExecutorThreadPool overriding the ThreadFactory
        LOG.warn("Ignore setting the thread factory as the pool has already provided his own: {}",
            getThreadFactory());
      }
    };
    ExecutorThreadPool threadPool = new ExecutorThreadPool((ThreadPoolExecutor) executorService);
    // HTTP Server
    org.eclipse.jetty.server.Server server = new Server(threadPool);
    server.setStopAtShutdown(true);

    ServerConnector connector;
    final HttpConfiguration httpServerConf = new HttpConfiguration();
    httpServerConf.setRequestHeaderSize(
        MetastoreConf.getIntVar(conf, ConfVars.METASTORE_THRIFT_HTTP_REQUEST_HEADER_SIZE));
    httpServerConf.setResponseHeaderSize(
        MetastoreConf.getIntVar(conf, ConfVars.METASTORE_THRIFT_HTTP_RESPONSE_HEADER_SIZE));
    // Do not advertise server implementation details in responses.
    httpServerConf.setSendServerVersion(false);
    httpServerConf.setSendXPoweredBy(false);
    final HttpConnectionFactory http = new HttpConnectionFactory(httpServerConf);
    // A non-null SslContextFactory means SSL is configured; wrap the connector with it.
    final SslContextFactory sslContextFactory = ServletSecurity.createSslContextFactory(conf);
    if (sslContextFactory != null) {
      connector = new ServerConnector(server, sslContextFactory, http);
    } else {
      connector = new ServerConnector(server, http);
    }
    connector.setPort(port);
    connector.setReuseAddress(true);
    // TODO: What should the idle timeout be for the metastore? Currently it is 30 minutes
    long maxIdleTimeout = MetastoreConf.getTimeVar(conf,
        ConfVars.METASTORE_THRIFT_HTTP_MAX_IDLE_TIME, TimeUnit.MILLISECONDS);
    connector.setIdleTimeout(maxIdleTimeout);
    // TODO: AcceptQueueSize needs to be higher for HMS
    connector.setAcceptQueueSize(maxWorkerThreads);
    // TODO: Connection keepalive configuration?
    server.addConnector(connector);

    TProcessor processor;
    boolean useCompactProtocol = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_COMPACT_PROTOCOL);
    final TProtocolFactory protocolFactory;
    if (useCompactProtocol) {
      protocolFactory = new TCompactProtocol.Factory();
    } else {
      protocolFactory = new TBinaryProtocol.Factory();
    }
    HMSHandler baseHandler = new HMSHandler("new db based metaserver", conf);
    IHMSHandler handler = HMSHandlerProxyFactory.getProxy(conf, baseHandler, false);
    processor = new ThriftHiveMetastore.Processor<>(handler);
    LOG.info("Starting DB backed MetaStore Server with generic processor");

    // Authentication for the servlet: JWT when configured, otherwise no auth (SIMPLE).
    boolean jwt = MetastoreConf.getVar(conf,
        ConfVars.THRIFT_METASTORE_AUTHENTICATION).equalsIgnoreCase("jwt");
    AuthType authType = jwt ? AuthType.JWT : AuthType.SIMPLE;
    ServletSecurity security = new ServletSecurity(authType, conf);
    Servlet thriftHttpServlet = security.proxy(new TServlet(processor, protocolFactory));

    boolean directSqlEnabled = MetastoreConf.getBoolVar(conf, ConfVars.TRY_DIRECT_SQL);
    HMSHandler.LOG.info("Direct SQL optimization = {}", directSqlEnabled);

    String httpPath = MetaStoreUtils.getHttpPath(
        MetastoreConf.getVar(conf, ConfVars.METASTORE_CLIENT_THRIFT_HTTP_PATH));

    ServletContextHandler context = new ServletContextHandler(
        ServletContextHandler.NO_SECURITY | ServletContextHandler.NO_SESSIONS);
    // Tons of stuff skipped as compared the HS2.
    // Sesions, XSRF, Compression, path configuration, etc.
    constraintHttpMethods(context, false);
    context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
    // adding a listener on servlet request so as to clean up
    // rawStore when http request is completed
    context.addEventListener(new ServletRequestListener() {
      @Override
      public void requestDestroyed(ServletRequestEvent servletRequestEvent) {
        Request baseRequest = Request.getBaseRequest(servletRequestEvent.getServletRequest());
        HttpChannel channel = baseRequest.getHttpChannel();
        if (LOG.isDebugEnabled()) {
          LOG.debug("request: " + baseRequest + " destroyed " + ", http channel: " + channel);
        }
        // Release the thread-local RawStore etc. tied to this request's handler thread.
        HMSHandler.cleanupHandlerContext();
      }

      @Override
      public void requestInitialized(ServletRequestEvent servletRequestEvent) {
        Request baseRequest = Request.getBaseRequest(servletRequestEvent.getServletRequest());
        HttpChannel channel = baseRequest.getHttpChannel();
        if (LOG.isDebugEnabled()) {
          LOG.debug("request: " + baseRequest + " initialized " + ", http channel: " + channel);
        }
      }
    });
    server.setHandler(context);

    return new ThriftServer() {
      @Override
      public void start() throws Throwable {
        HMSHandler.LOG.debug("Starting HTTPServer for HMS");
        server.setStopAtShutdown(true);
        server.start();
        HMSHandler.LOG.info("Started the new HTTPServer for metastore on port [{}]...", port);
        HMSHandler.LOG.info("Options.minWorkerThreads = {}", minWorkerThreads);
        HMSHandler.LOG.info("Options.maxWorkerThreads = {}", maxWorkerThreads);
        HMSHandler.LOG.info("Enable SSL = {}", (sslContextFactory != null));
        // Block until the Jetty server exits, mirroring TServer.serve() semantics.
        server.join();
      }

      @Override
      public boolean isRunning() {
        return server != null && server.isRunning();
      }

      @Override
      public IHMSHandler getHandler() {
        return handler;
      }
    };
  }

  /**
   * Builds (but does not start) the binary-transport metastore: a Thrift
   * {@link TThreadPoolServer} over a plain, SASL-wrapped or SSL server socket.
   *
   * @param port the listen port
   * @param bridge the Hadoop auth bridge used for SASL/Kerberos
   * @param conf configuration
   * @return a {@link ThriftServer} wrapper whose {@code start()} serves until shutdown
   */
  private static ThriftServer startBinaryMetastore(int port, HadoopThriftAuthBridge bridge,
      Configuration conf) throws Throwable {
    // Server will create new threads up to max as necessary. After an idle
    // period, it will destroy threads to keep the number of threads in the
    // pool to min.
    long maxMessageSize = MetastoreConf.getLongVar(conf, ConfVars.SERVER_MAX_MESSAGE_SIZE);
    int minWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MIN_THREADS);
    int maxWorkerThreads = MetastoreConf.getIntVar(conf, ConfVars.SERVER_MAX_THREADS);
    boolean useCompactProtocol = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_COMPACT_PROTOCOL);
    boolean useSSL = MetastoreConf.getBoolVar(conf, ConfVars.USE_SSL);
    HMSHandler baseHandler = new HMSHandler("new db based metaserver", conf);
    AuthFactory authFactory = new AuthFactory(bridge, conf, baseHandler);
    useSasl = authFactory.isSASLWithKerberizedHadoop();
    if (useSasl) {
      // we are in secure mode. Login using keytab
      String kerberosName = SecurityUtil
          .getServerPrincipal(MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL), "0.0.0.0");
      String keyTabFile = MetastoreConf.getVar(conf, ConfVars.KERBEROS_KEYTAB_FILE);
      UserGroupInformation.loginUserFromKeytab(kerberosName, keyTabFile);
      saslServer = authFactory.getSaslServer();
      delegationTokenManager = authFactory.getDelegationTokenManager();
    }

    TProcessor processor;
    TTransportFactory transFactory = authFactory.getAuthTransFactory(useSSL, conf);
    final TProtocolFactory protocolFactory;
    final TProtocolFactory inputProtoFactory;
    // The input factory carries the max-message-size limit for incoming frames.
    if (useCompactProtocol) {
      protocolFactory = new TCompactProtocol.Factory();
      inputProtoFactory = new TCompactProtocol.Factory(maxMessageSize, maxMessageSize);
    } else {
      protocolFactory = new TBinaryProtocol.Factory();
      inputProtoFactory = new TBinaryProtocol.Factory(true, true, maxMessageSize, maxMessageSize);
    }
    msHost = MetastoreConf.getVar(conf, ConfVars.THRIFT_BIND_HOST);
    if (msHost != null && !msHost.trim().isEmpty()) {
      LOG.info("Binding host " + msHost + " for metastore server");
    }

    IHMSHandler handler = HMSHandlerProxyFactory.getProxy(conf, baseHandler, false);
    TServerSocket serverSocket;

    if (useSasl) {
      processor = saslServer.wrapProcessor(
          new ThriftHiveMetastore.Processor<>(handler));
      LOG.info("Starting DB backed MetaStore Server in Secure Mode");
    } else {
      // we are in unsecure mode.
      if (MetastoreConf.getBoolVar(conf, ConfVars.EXECUTE_SET_UGI)) {
        processor = new TUGIBasedProcessor<>(handler);
        LOG.info("Starting DB backed MetaStore Server with SetUGI enabled");
      } else {
        processor = new TSetIpAddressProcessor<>(handler);
        LOG.info("Starting DB backed MetaStore Server");
      }
    }

    if (!useSSL) {
      serverSocket = SecurityUtils.getServerSocket(msHost, port);
    } else {
      String keyStorePath = MetastoreConf.getVar(conf, ConfVars.SSL_KEYSTORE_PATH).trim();
      if (keyStorePath.isEmpty()) {
        throw new IllegalArgumentException(ConfVars.SSL_KEYSTORE_PATH.toString()
            + " Not configured for SSL connection");
      }
      String keyStorePassword =
          MetastoreConf.getPassword(conf, MetastoreConf.ConfVars.SSL_KEYSTORE_PASSWORD);
      String keyStoreType = MetastoreConf.getVar(conf, ConfVars.SSL_KEYSTORE_TYPE).trim();
      String keyStoreAlgorithm = MetastoreConf.getVar(conf,
          ConfVars.SSL_KEYMANAGERFACTORY_ALGORITHM).trim();
      // enable SSL support for HMS
      List<String> sslVersionBlacklist = new ArrayList<>();
      for (String sslVersion : MetastoreConf.getVar(conf,
          ConfVars.SSL_PROTOCOL_BLACKLIST).split(",")) {
        sslVersionBlacklist.add(sslVersion);
      }

      serverSocket = SecurityUtils.getServerSSLSocket(msHost, port, keyStorePath,
          keyStorePassword, keyStoreType, keyStoreAlgorithm, sslVersionBlacklist);
    }

    ExecutorService executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
        60L, TimeUnit.SECONDS, new SynchronousQueue<>(), r -> {
          Thread thread = new Thread(r);
          thread.setDaemon(true);
          thread.setName("Metastore-Handler-Pool: Thread-" + thread.getId());
          return thread;
        });

    TThreadPoolServer.Args args =
        new TThreadPoolServer.Args(new TServerSocket(serverSocket.getServerSocket()) {
          @Override
          public TSocket accept() throws TTransportException {
            TSocket ts = super.accept();
            // get the limit from the configuration for every new connection
            int maxThriftMessageSize = (int) MetastoreConf.getSizeVar(
                conf, MetastoreConf.ConfVars.THRIFT_METASTORE_CLIENT_MAX_MESSAGE_SIZE);
            HMSHandler.LOG.debug("Thrift maxMessageSize = {}", maxThriftMessageSize);
            if (maxThriftMessageSize > 0) {
              ts.getConfiguration().setMaxMessageSize(maxThriftMessageSize);
            }
            return ts;
          }
        })
        .processor(processor)
        .transportFactory(transFactory)
        .protocolFactory(protocolFactory)
        .inputProtocolFactory(inputProtoFactory)
        .executorService(executorService);

    TServer tServer = new TThreadPoolServer(args);
    // Track open-connection metrics and clean up per-connection handler state.
    TServerEventHandler tServerEventHandler = new TServerEventHandler() {
      @Override
      public void preServe() {
      }

      @Override
      public ServerContext createContext(TProtocol tProtocol, TProtocol tProtocol1) {
        Metrics.getOpenConnectionsCounter().inc();
        return null;
      }

      @Override
      public void deleteContext(ServerContext serverContext, TProtocol tProtocol,
          TProtocol tProtocol1) {
        Metrics.getOpenConnectionsCounter().dec();
        // If the IMetaStoreClient#close was called, HMSHandler#shutdown would have already
        // cleaned up thread local RawStore. Otherwise, do it now.
        HMSHandler.cleanupHandlerContext();
      }

      @Override
      public void processContext(ServerContext serverContext, TTransport tTransport,
          TTransport tTransport1) {
      }
    };
    tServer.setServerEventHandler(tServerEventHandler);

    return new ThriftServer() {
      @Override
      public void start() throws Throwable {
        HMSHandler.LOG.info("Started the new metaserver on port [{}]...", port);
        HMSHandler.LOG.info("Options.minWorkerThreads = {}", minWorkerThreads);
        HMSHandler.LOG.info("Options.maxWorkerThreads = {}", maxWorkerThreads);
        HMSHandler.LOG.info("Enable SSL = {}", useSSL);
        // Blocks serving requests until the server is stopped.
        tServer.serve();
      }

      @Override
      public boolean isRunning() {
        return tServer != null && tServer.isServing();
      }

      @Override
      public IHMSHandler getHandler() {
        return handler;
      }
    };
  }

  /**
   * Installs security constraints that reject TRACE (and, unless allowed, OPTIONS)
   * requests on all paths of the given context.
   *
   * @param ctxHandler the servlet context to protect
   * @param allowOptionsMethod when false, OPTIONS is constrained alongside TRACE
   */
  private static void constraintHttpMethods(ServletContextHandler ctxHandler,
      boolean allowOptionsMethod) {
    Constraint c = new Constraint();
    c.setAuthenticate(true);
    ConstraintMapping cmt = new ConstraintMapping();
    cmt.setConstraint(c);
    cmt.setMethod("TRACE");
    cmt.setPathSpec("/*");
    ConstraintSecurityHandler securityHandler = new ConstraintSecurityHandler();
    if (!allowOptionsMethod) {
      ConstraintMapping cmo = new ConstraintMapping();
      cmo.setConstraint(c);
      cmo.setMethod("OPTIONS");
      cmo.setPathSpec("/*");
      securityHandler.setConstraintMappings(new ConstraintMapping[] {cmt, cmo});
    } else {
      securityHandler.setConstraintMappings(new ConstraintMapping[] {cmt});
    }
    ctxHandler.setSecurityHandler(securityHandler);
  }

  /**
   * Start Metastore based on a passed {@link HadoopThriftAuthBridge}.
   *
   * @param port The port on which the Thrift server will start to serve
   * @param bridge
   * @param conf Configuration overrides
   * @param startMetaStoreThreads Start the background threads (initiator, cleaner, statsupdater, etc.)
   * @param startedBackgroundThreads If startMetaStoreThreads is true, this AtomicBoolean will be switched to true,
   *   when all of the background threads are scheduled. Useful for testing purposes to wait
   *   until the MetaStore is fully initialized.
   * @throws Throwable
   */
  public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
      Configuration conf, boolean startMetaStoreThreads,
      AtomicBoolean startedBackgroundThreads) throws Throwable {
    isMetaStoreRemote = true;
    if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_SASL)
        && MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.THRIFT_ZOOKEEPER_USE_KERBEROS)) {
      String principal = MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL);
      String keyTab = MetastoreConf.getVar(conf, ConfVars.KERBEROS_KEYTAB_FILE);
      SecurityUtils.setZookeeperClientKerberosJaasConfig(principal, keyTab);
    }

    // Choose the transport: "http" or the default binary socket transport.
    String transportMode = MetastoreConf.getVar(conf, ConfVars.THRIFT_TRANSPORT_MODE, "binary");
    boolean isHttpTransport = transportMode.equalsIgnoreCase("http");
    if (isHttpTransport) {
      thriftServer = startHttpMetastore(port, conf);
    } else {
      thriftServer = startBinaryMetastore(port, bridge, conf);
    }

    boolean directSqlEnabled = MetastoreConf.getBoolVar(conf, ConfVars.TRY_DIRECT_SQL);
    LOG.info("Direct SQL optimization = {}", directSqlEnabled);

    if (startMetaStoreThreads) {
      Lock metaStoreThreadsLock = new ReentrantLock();
      Condition startCondition = metaStoreThreadsLock.newCondition();
      AtomicBoolean startedServing = new AtomicBoolean();
      startMetaStoreThreads(conf, metaStoreThreadsLock, startCondition, startedServing,
          startedBackgroundThreads);
      signalOtherThreadsToStart(thriftServer, metaStoreThreadsLock, startCondition, startedServing);
    }

    // If dynamic service discovery through ZooKeeper is enabled, add this server to the ZooKeeper.
    if (MetastoreConf.getVar(conf, ConfVars.THRIFT_SERVICE_DISCOVERY_MODE)
        .equalsIgnoreCase("zookeeper")) {
      try {
        zooKeeperHelper = MetastoreConf.getZKConfig(conf);
        String serverInstanceURI = getServerInstanceURI(port);
        zooKeeperHelper.addServerInstanceToZooKeeper(serverInstanceURI, serverInstanceURI, null,
            new ZKDeRegisterWatcher(zooKeeperHelper));
        LOG.info("Metastore server instance with URL " + serverInstanceURI + " added to "
            + "the zookeeper");
      } catch (Exception e) {
        LOG.error("Error adding this metastore instance to ZooKeeper: ", e);
        throw e;
      }
    }

    // optionally create and start the property and Iceberg REST server
    ServletServerBuilder builder = new ServletServerBuilder(conf);
    ServletServerBuilder.Descriptor properties =
        builder.addServlet(PropertyServlet.createServlet(conf));
    builder.addServlet(createCatalogServlet(conf));
    servletServer = builder.start(LOG);
    if (servletServer != null && properties != null) {
      propertyServletPort = properties.getPort();
    }
    // main server — blocks until the Thrift server terminates.
    thriftServer.start();
  }

  /**
   * Creates the HMS catalog servlet descriptor.
   * @param configuration the configuration
   * @return the servlet descriptor (can be null)
   */
  static ServletServerBuilder.Descriptor createCatalogServlet(Configuration configuration) {
    try {
      // The factory class is loaded reflectively so the catalog servlet jar stays optional.
      String className = MetastoreConf.getVar(configuration, ConfVars.CATALOG_SERVLET_FACTORY);
      Class<?> iceClazz = Class.forName(className);
      Method iceStart = iceClazz.getMethod("createServlet", Configuration.class);
      return (ServletServerBuilder.Descriptor) iceStart.invoke(null, configuration);
    } catch (ClassNotFoundException xnf) {
      LOG.warn("Unable to start the Catalog server, missing jar?", xnf);
      return null;
    } catch (Exception e) {
      LOG.error("Unable to start the Catalog server", e);
      return null;
    }
  }

  /**
   * @param port where metastore server is running
   * @return metastore server instance URL. If the metastore server was bound to a configured
   * host, return that appended by port. Otherwise, return the externally visible URL of the local
   * host with the given port
   * @throws Exception
   */
  private static String getServerInstanceURI(int port) throws Exception {
    return IPStackUtils.concatHostPort(getServerHostName(), port);
  }

  /**
   * Resolves the host name this server should advertise: the configured bind host if
   * set, otherwise the canonical name of the local host.
   */
  static String getServerHostName() throws Exception {
    if (msHost != null && !msHost.trim().isEmpty()) {
      return msHost.trim();
    } else {
      return InetAddress.getLocalHost().getCanonicalHostName();
    }
  }

  /**
   * Spawns a daemon thread that polls until the Thrift server is serving, then flips
   * {@code startedServing} and signals every thread waiting on {@code startCondition}.
   */
  private static void signalOtherThreadsToStart(final ThriftServer thriftServer,
      final Lock startLock, final Condition startCondition,
      final AtomicBoolean startedServing) {
    // A simple thread to wait until the server has started and then signal the other threads to
    // begin
    Thread t = new Thread() {
      @Override
      public void run() {
        do {
          try {
            Thread.sleep(1000);
          } catch (InterruptedException e) {
            LOG.warn("Signalling thread was interrupted: " + e.getMessage());
          }
        } while (!thriftServer.isRunning());
        startLock.lock();
        try {
          startedServing.set(true);
          startCondition.signalAll();
        } finally {
          startLock.unlock();
        }
      }
    };
    t.setDaemon(true);
    t.start();
  }

  /**
   * Start threads outside of the thrift service, such as the compactor threads.
   * @param conf Hive configuration object
   */
  private static void startMetaStoreThreads(final Configuration conf, final Lock startLock,
      final Condition startCondition, final AtomicBoolean startedServing,
      final AtomicBoolean startedBackGroundThreads) {
    // A thread is spun up to start these other threads. That's because we can't start them
    // until after the TServer has started, but once TServer.serve is called we aren't given back
    // control.
    Thread t = new Thread() {
      @Override
      public void run() {
        // This is a massive hack. The compactor threads have to access packages in ql (such as
        // AcidInputFormat). ql depends on metastore so we can't directly access those. To deal
        // with this the compactor thread classes have been put in ql and they are instantiated here
        // dyanmically. This is not ideal but it avoids a massive refactoring of Hive packages.
        //
        // Wrap the start of the threads in a catch Throwable loop so that any failures
        // don't doom the rest of the metastore.
        startLock.lock();
        try {
          JvmPauseMonitor pauseMonitor = new JvmPauseMonitor(conf);
          pauseMonitor.start();
        } catch (Throwable t) {
          LOG.warn("Could not initiate the JvmPauseMonitor thread." + " GCs and Pauses may not be "
              + "warned upon.", t);
        }

        try {
          // Per the javadocs on Condition, do not depend on the condition alone as a start gate
          // since spurious wake ups are possible.
          while (!startedServing.get()) {
            startCondition.await();
          }

          LeaderElectionContext context = new LeaderElectionContext.ContextBuilder(conf)
              .setHMSHandler(thriftServer.getHandler()).servHost(getServerHostName())
              .setTType(LeaderElectionContext.TTYPE.ALWAYS_TASKS) // always tasks
              .addListener(new HouseKeepingTasks(conf, false))
              .setTType(LeaderElectionContext.TTYPE.HOUSEKEEPING) // housekeeping tasks
              .addListener(new CMClearer(conf))
              .addListener(new StatsUpdaterTask(conf))
              .addListener(new CompactorTasks(conf, false))
              .addListener(new CompactorPMF())
              .addListener(new HouseKeepingTasks(conf, true))
              .setTType(LeaderElectionContext.TTYPE.WORKER) // compactor worker
              .addListener(new CompactorTasks(conf, true),
                  MetastoreConf.getVar(conf,
                      MetastoreConf.ConfVars.HIVE_METASTORE_RUNWORKER_IN).equals("metastore"))
              .build();
          if (shutdownHookMgr != null) {
            shutdownHookMgr.addShutdownHook(() -> context.close(), 0);
          }
          context.start();
        } catch (Throwable e) {
          LOG.error("Failure when starting the leader tasks, Compaction or Housekeeping tasks may not happen", e);
        } finally {
          startLock.unlock();
        }

        if (startedBackGroundThreads != null) {
          startedBackGroundThreads.set(true);
        }
      }
    };
    t.setDaemon(true);
    t.setName("Metastore threads starter thread");
    t.start();
  }

  /**
   * Print a log message for starting up and shutting down.
   * @param clazz the class of the server
   * @param args arguments
   * @param LOG the target log object
   */
  private static void startupShutdownMessage(Class<?> clazz, String[] args,
      final org.slf4j.Logger LOG) {
    final String hostname = getHostname();
    final String classname = clazz.getSimpleName();
    LOG.info(
        toStartupShutdownString("STARTUP_MSG: ", new String[] {
            "Starting " + classname,
            " host = " + hostname,
            " args = " + Arrays.asList(args),
            " version = " + MetastoreVersionInfo.getVersion(),
            " classpath = " + System.getProperty("java.class.path"),
            " build = " + MetastoreVersionInfo.getUrl() + " -r "
                + MetastoreVersionInfo.getRevision()
                + "; compiled by '" + MetastoreVersionInfo.getUser()
                + "' on " + MetastoreVersionInfo.getDate()}
        )
    );
  }

  /**
   * Return a message for logging.
   * @param prefix prefix keyword for the message
   * @param msg content of the message
   * @return a message for logging
   */
  private static String toStartupShutdownString(String prefix, String[] msg) {
    StringBuilder b = new StringBuilder(prefix);
    b.append("\n/************************************************************");
    for(String s : msg) {
      b.append("\n")
          .append(prefix)
          .append(s);
    }
    b.append("\n************************************************************/");
    return b.toString();
  }

  /**
   * Return hostname without throwing exception.
   * @return hostname
   */
  private static String getHostname() {
    try {
      // InetAddress.toString() yields "hostname/ip-address".
      return "" + InetAddress.getLocalHost();
    } catch(UnknownHostException uhe) {
      return "" + uhe;
    }
  }
}
apache/ignite-3
38,228
modules/sql-engine/src/main/java/org/apache/ignite/internal/sql/engine/sql/fun/IgniteSqlOperatorTable.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.sql.engine.sql.fun; import com.google.common.collect.ImmutableList; import java.util.List; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.SqlBasicFunction; import org.apache.calcite.sql.SqlBinaryOperator; import org.apache.calcite.sql.SqlFunction; import org.apache.calcite.sql.SqlFunctionCategory; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlOperatorBinding; import org.apache.calcite.sql.fun.SqlAbstractTimeFunction; import org.apache.calcite.sql.fun.SqlInternalOperators; import org.apache.calcite.sql.fun.SqlLibraryOperators; import org.apache.calcite.sql.fun.SqlMinMaxAggFunction; import org.apache.calcite.sql.fun.SqlMonotonicBinaryOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.fun.SqlSubstringFunction; import org.apache.calcite.sql.type.InferTypes; import org.apache.calcite.sql.type.OperandTypes; import org.apache.calcite.sql.type.ReturnTypes; import org.apache.calcite.sql.type.SqlOperandTypeChecker; import 
org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.calcite.sql.type.SqlSingleOperandTypeChecker;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeTransforms;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.calcite.sql.util.ReflectiveSqlOperatorTable;
import org.apache.ignite.internal.sql.engine.type.IgniteTypeFactory;
import org.apache.ignite.internal.sql.engine.util.Commons;
import org.apache.ignite.internal.sql.engine.util.TypeUtils;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Operator table that contains only Ignite-specific functions and operators.
 */
public class IgniteSqlOperatorTable extends ReflectiveSqlOperatorTable {
    // Requires both operands to belong to the same type family (arity 2).
    private static final SqlSingleOperandTypeChecker SAME_SAME = new SameFamilyOperandTypeChecker(2);

    // Checker for "datetime <op> interval" operand order.
    private static final SqlOperandTypeChecker DATETIME_MATCHING_INTERVAL = new SqlDateTimeIntervalTypeChecker(true);

    // Checker for "interval <op> datetime" operand order.
    private static final SqlOperandTypeChecker MATCHING_INTERVAL_DATETIME = new SqlDateTimeIntervalTypeChecker(false);

    // Operand combinations accepted by '+': numeric+numeric (same family),
    // interval+interval, datetime+interval, and interval+datetime.
    private static final SqlOperandTypeChecker PLUS_OPERATOR_TYPES_CHECKER = OperandTypes.NUMERIC_NUMERIC.and(SAME_SAME)
            .or(OperandTypes.INTERVAL_SAME_SAME)
            .or(DATETIME_MATCHING_INTERVAL)
            .or(MATCHING_INTERVAL_DATETIME);

    // Operand combinations accepted by '-'; unlike '+', "interval - datetime" is not allowed.
    private static final SqlOperandTypeChecker MINUS_OPERATOR_TYPES_CHECKER = OperandTypes.NUMERIC_NUMERIC.and(SAME_SAME)
            .or(OperandTypes.INTERVAL_SAME_SAME)
            .or(OperandTypes.DATETIME_INTERVAL.and(DATETIME_MATCHING_INTERVAL));

    // Operand combinations accepted by '/': numeric/numeric (same family) or interval/numeric.
    private static final SqlSingleOperandTypeChecker DIVISION_OPERATOR_TYPES_CHECKER =
            OperandTypes.NUMERIC_NUMERIC.and(SAME_SAME)
                    .or(OperandTypes.INTERVAL_NUMERIC);

    // Operand combinations accepted by '*'; interval may appear on either side.
    public static final SqlSingleOperandTypeChecker MULTIPLY_OPERATOR_TYPES_CHECKER =
            OperandTypes.NUMERIC_NUMERIC.and(SAME_SAME)
                    .or(OperandTypes.INTERVAL_NUMERIC)
                    .or(OperandTypes.NUMERIC_INTERVAL);

    /** The {@code LENGTH(string|binary)} function; nullable INTEGER result. */
    public static final SqlFunction LENGTH = new SqlFunction(
            "LENGTH",
            SqlKind.OTHER_FUNCTION,
            ReturnTypes.INTEGER_NULLABLE,
            null,
            OperandTypes.CHARACTER.or(OperandTypes.BINARY),
            SqlFunctionCategory.NUMERIC);

    /** Table function producing a range of values; definition lives in {@code SqlSystemRangeFunction}. */
    public static final SqlFunction SYSTEM_RANGE = new SqlSystemRangeFunction();

    /** The {@code TYPEOF(any)} function; returns the argument's type name as VARCHAR. */
    public static final SqlFunction TYPEOF = new SqlFunction(
            "TYPEOF",
            SqlKind.OTHER_FUNCTION,
            ReturnTypes.VARCHAR_2000,
            null,
            OperandTypes.ANY,
            SqlFunctionCategory.SYSTEM);

    /**
     * Least of two arguments. Unlike LEAST, which is converted to CASE WHEN THEN END clause, this function
     * is natively implemented.
     *
     * <p>Note: System function, cannot be used by user.
     */
    public static final SqlFunction LEAST2 = new SqlFunction(
            "$LEAST2",
            SqlKind.OTHER_FUNCTION,
            ReturnTypes.LEAST_RESTRICTIVE.andThen(SqlTypeTransforms.TO_NULLABLE),
            null,
            SAME_SAME,
            SqlFunctionCategory.SYSTEM);

    /**
     * Greatest of two arguments. Unlike GREATEST, which is converted to CASE WHEN THEN END clause, this function
     * is natively implemented.
     *
     * <p>Note: System function, cannot be used by user.
     */
    public static final SqlFunction GREATEST2 = new SqlFunction(
            "$GREATEST2",
            SqlKind.OTHER_FUNCTION,
            ReturnTypes.LEAST_RESTRICTIVE.andThen(SqlTypeTransforms.TO_NULLABLE),
            null,
            SAME_SAME,
            SqlFunctionCategory.SYSTEM);

    /**
     * Generic {@code SUBSTR(string, position [, length]} function.
     * This function works exactly the same as {@link SqlSubstringFunction SUBSTRING(string, position [, length])}.
     */
    public static final SqlFunction SUBSTR = new SqlFunction(
            "SUBSTR",
            SqlKind.OTHER_FUNCTION,
            ReturnTypes.ARG0_NULLABLE_VARYING,
            null,
            OperandTypes.STRING_INTEGER.or(OperandTypes.STRING_INTEGER_INTEGER),
            SqlFunctionCategory.STRING);

    /**
     * The {@code RAND_UUID()} function, which yields a random UUID.
     */
    public static final SqlFunction RAND_UUID = new SqlFunction(
            "RAND_UUID",
            SqlKind.OTHER_FUNCTION,
            ReturnTypes.explicit(SqlTypeName.UUID),
            null,
            OperandTypes.NILADIC,
            SqlFunctionCategory.SYSTEM
    ) {
        // Dynamic: the planner must not cache/fold the value across executions.
        @Override
        public boolean isDynamicFunction() {
            return true;
        }

        // Non-deterministic: each invocation yields a different UUID.
        @Override
        public boolean isDeterministic() {
            return false;
        }
    };

    /** The {@code ROUND(numeric [, numeric])} function.
     */
    public static final SqlFunction ROUND = SqlBasicFunction.create("ROUND",
            new SetScaleToZeroIfSingleArgument(),
            OperandTypes.NUMERIC.or(OperandTypes.NUMERIC_INTEGER),
            SqlFunctionCategory.NUMERIC);

    /** The {@code TRUNCATE(numeric [, numeric])} function. */
    public static final SqlFunction TRUNCATE = SqlBasicFunction.create("TRUNCATE",
            new SetScaleToZeroIfSingleArgument(),
            OperandTypes.NUMERIC.or(OperandTypes.NUMERIC_INTEGER),
            SqlFunctionCategory.NUMERIC);

    /** The {@code OCTET_LENGTH(string|binary)} function. */
    public static final SqlFunction OCTET_LENGTH = SqlBasicFunction.create("OCTET_LENGTH",
            ReturnTypes.INTEGER_NULLABLE,
            OperandTypes.CHARACTER.or(OperandTypes.BINARY),
            SqlFunctionCategory.NUMERIC);

    /**
     * Division operator for decimal type. Uses provided values of {@code scale} and {@code precision} to return inferred type.
     */
    public static final SqlFunction DECIMAL_DIVIDE = SqlBasicFunction.create("DECIMAL_DIVIDE",
            new SqlReturnTypeInference() {
                @Override
                public @Nullable RelDataType inferReturnType(SqlOperatorBinding opBinding) {
                    RelDataType arg0 = opBinding.getOperandType(0);
                    // Operands 2 and 3 carry the target precision/scale as literals.
                    Integer precision = opBinding.getOperandLiteralValue(2, Integer.class);
                    Integer scale = opBinding.getOperandLiteralValue(3, Integer.class);

                    assert precision != null : "precision is not specified: " + opBinding.getOperator();
                    assert scale != null : "scale is not specified: " + opBinding.getOperator();

                    // Result nullability follows the dividend.
                    boolean nullable = arg0.isNullable();
                    RelDataTypeFactory typeFactory = opBinding.getTypeFactory();
                    RelDataType returnType = typeFactory.createSqlType(SqlTypeName.DECIMAL, precision, scale);

                    return typeFactory.createTypeWithNullability(returnType, nullable);
                }
            },
            OperandTypes.DIVISION_OPERATOR,
            SqlFunctionCategory.NUMERIC);

    /**
     * Logical less-than operator, '{@code <}'.
     */
    public static final SqlBinaryOperator DIVIDE = new SqlBinaryOperator(
            "/",
            SqlKind.DIVIDE,
            60,
            true,
            ReturnTypes.QUOTIENT_NULLABLE,
            InferTypes.FIRST_KNOWN,
            DIVISION_OPERATOR_TYPES_CHECKER);

    /**
     * Arithmetic multiplication operator, '{@code *}'.
     */
    public static final SqlBinaryOperator MULTIPLY = new SqlMonotonicBinaryOperator(
            "*",
            SqlKind.TIMES,
            60,
            true,
            ReturnTypes.PRODUCT_NULLABLE,
            InferTypes.FIRST_KNOWN,
            MULTIPLY_OPERATOR_TYPES_CHECKER);

    /**
     * Arithmetic remainder operator, '{@code %}'. Restricted to exact numerics of the same family.
     */
    public static final SqlBinaryOperator PERCENT_REMAINDER = new SqlBinaryOperator(
            "%",
            SqlKind.MOD,
            60,
            true,
            ReturnTypes.NULLABLE_MOD,
            null,
            OperandTypes.EXACT_NUMERIC_EXACT_NUMERIC.and(SAME_SAME));

    /**
     * {@code EVERY} aggregate function. Implemented as MIN over booleans.
     */
    public static final SqlAggFunction EVERY = new SqlMinMaxAggFunction("EVERY", SqlKind.MIN, OperandTypes.BOOLEAN);

    /**
     * {@code SOME} aggregate function. Implemented as MAX over booleans.
     */
    public static final SqlAggFunction SOME = new SqlMinMaxAggFunction("SOME", SqlKind.MAX, OperandTypes.BOOLEAN);

    /**
     * The <code>CURRENT_TIMESTAMP [(<i>precision</i>)]</code> function.
     */
    public static final SqlFunction CURRENT_TIMESTAMP =
            new SqlAbstractTimeFunction("CURRENT_TIMESTAMP", SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {};

    /** Singleton instance. */
    public static final IgniteSqlOperatorTable INSTANCE = new IgniteSqlOperatorTable();

    /** IgniteCustomType: A list of functions supported by all custom data types. */
    public static final List<SqlFunction> CUSTOM_TYPE_FUNCTIONS = List.of(
            SqlStdOperatorTable.CAST,
            SqlStdOperatorTable.COALESCE,
            SqlStdOperatorTable.NULLIF,
            TYPEOF,
            SqlStdOperatorTable.COUNT,
            SqlStdOperatorTable.MIN,
            SqlStdOperatorTable.MAX,
            SqlStdOperatorTable.ANY_VALUE,
            SOME,
            SqlStdOperatorTable.SINGLE_VALUE,
            EVERY
    );

    /**
     * Default constructor.
     */
    public IgniteSqlOperatorTable() {
        init0();
    }

    // Registers every operator/function this table exposes; the resulting list
    // is indexed once and installed via setOperators.
    private void init0() {
        ImmutableList.Builder<SqlOperator> definedOperatorsBuilder = ImmutableList.builder();

        // Set operators.
        definedOperatorsBuilder.add(SqlStdOperatorTable.UNION);
        definedOperatorsBuilder.add(SqlStdOperatorTable.UNION_ALL);
        definedOperatorsBuilder.add(SqlStdOperatorTable.EXCEPT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.EXCEPT_ALL);
        definedOperatorsBuilder.add(SqlStdOperatorTable.INTERSECT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.INTERSECT_ALL);

        // Logical.
        definedOperatorsBuilder.add(SqlStdOperatorTable.AND);
        definedOperatorsBuilder.add(SqlStdOperatorTable.OR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NOT);

        // Comparisons. Note: Ignite's own comparison operators (declared above)
        // are registered instead of the Calcite standard ones.
        definedOperatorsBuilder.add(LESS_THAN);
        definedOperatorsBuilder.add(LESS_THAN_OR_EQUAL);
        definedOperatorsBuilder.add(GREATER_THAN);
        definedOperatorsBuilder.add(GREATER_THAN_OR_EQUAL);
        definedOperatorsBuilder.add(EQUALS);
        definedOperatorsBuilder.add(NOT_EQUALS);
        definedOperatorsBuilder.add(SqlStdOperatorTable.BETWEEN);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NOT_BETWEEN);

        // Arithmetic.
        definedOperatorsBuilder.add(PLUS);
        definedOperatorsBuilder.add(MINUS);
        definedOperatorsBuilder.add(MULTIPLY);
        definedOperatorsBuilder.add(DIVIDE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.DIVIDE_INTEGER); // Used internally.
        definedOperatorsBuilder.add(PERCENT_REMAINDER);
        definedOperatorsBuilder.add(SqlStdOperatorTable.UNARY_MINUS);
        definedOperatorsBuilder.add(SqlStdOperatorTable.UNARY_PLUS);

        // Aggregates.
        definedOperatorsBuilder.add(SqlStdOperatorTable.COUNT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SUM);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SUM0);
        definedOperatorsBuilder.add(SqlStdOperatorTable.AVG);
        definedOperatorsBuilder.add(DECIMAL_DIVIDE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.MIN);
        definedOperatorsBuilder.add(SqlStdOperatorTable.MAX);
        definedOperatorsBuilder.add(SqlStdOperatorTable.ANY_VALUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SINGLE_VALUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.FILTER);
        definedOperatorsBuilder.add(SqlStdOperatorTable.GROUPING);
        definedOperatorsBuilder.add(EVERY);
        definedOperatorsBuilder.add(SOME);

        // IS ... operator.
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NULL);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_NULL);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_TRUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_TRUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_FALSE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_FALSE);
        definedOperatorsBuilder.add(IS_DISTINCT_FROM);
        definedOperatorsBuilder.add(IS_NOT_DISTINCT_FROM);

        // LIKE and SIMILAR.
        definedOperatorsBuilder.add(SqlStdOperatorTable.LIKE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NOT_LIKE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SIMILAR_TO);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NOT_SIMILAR_TO);

        // NULLS ordering.
        definedOperatorsBuilder.add(SqlStdOperatorTable.NULLS_FIRST);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NULLS_LAST);
        definedOperatorsBuilder.add(SqlStdOperatorTable.DESC);

        // Exists.
        definedOperatorsBuilder.add(SqlStdOperatorTable.EXISTS);

        // String functions.
        definedOperatorsBuilder.add(SqlStdOperatorTable.UPPER);
        definedOperatorsBuilder.add(SqlStdOperatorTable.LOWER);
        definedOperatorsBuilder.add(SqlStdOperatorTable.INITCAP);
        definedOperatorsBuilder.add(SqlLibraryOperators.TO_BASE64);
        definedOperatorsBuilder.add(SqlLibraryOperators.FROM_BASE64);
        definedOperatorsBuilder.add(SqlLibraryOperators.MD5);
        definedOperatorsBuilder.add(SqlLibraryOperators.SHA1);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SUBSTRING);
        definedOperatorsBuilder.add(SqlLibraryOperators.LEFT);
        definedOperatorsBuilder.add(SqlLibraryOperators.RIGHT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.REPLACE);
        definedOperatorsBuilder.add(SqlLibraryOperators.TRANSLATE3);
        definedOperatorsBuilder.add(SqlLibraryOperators.CHR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CHAR_LENGTH);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CHARACTER_LENGTH);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CONCAT);
        definedOperatorsBuilder.add(SqlLibraryOperators.CONCAT_FUNCTION);
        definedOperatorsBuilder.add(SqlStdOperatorTable.OVERLAY);
        definedOperatorsBuilder.add(SqlStdOperatorTable.POSITION);
        definedOperatorsBuilder.add(SqlStdOperatorTable.ASCII);
        definedOperatorsBuilder.add(SqlLibraryOperators.REPEAT);
        definedOperatorsBuilder.add(SqlLibraryOperators.SPACE);
        definedOperatorsBuilder.add(SqlLibraryOperators.STRCMP);
        definedOperatorsBuilder.add(SqlLibraryOperators.SOUNDEX);
        definedOperatorsBuilder.add(SqlLibraryOperators.DIFFERENCE);
        definedOperatorsBuilder.add(SqlLibraryOperators.REVERSE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.TRIM);
        definedOperatorsBuilder.add(SqlLibraryOperators.LTRIM);
        definedOperatorsBuilder.add(SqlLibraryOperators.RTRIM);
        definedOperatorsBuilder.add(SUBSTR);

        // Math functions.
        definedOperatorsBuilder.add(SqlStdOperatorTable.MOD); // Arithmetic remainder.
        definedOperatorsBuilder.add(SqlStdOperatorTable.EXP); // Euler's number e raised to the power of a value.
        definedOperatorsBuilder.add(SqlStdOperatorTable.POWER);
        definedOperatorsBuilder.add(SqlStdOperatorTable.LN); // Natural logarithm.
        definedOperatorsBuilder.add(SqlStdOperatorTable.LOG10); // The base 10 logarithm.
        definedOperatorsBuilder.add(SqlStdOperatorTable.ABS); // Absolute value.
        definedOperatorsBuilder.add(SqlStdOperatorTable.RAND); // Random.
        definedOperatorsBuilder.add(SqlStdOperatorTable.RAND_INTEGER); // Integer random.
        definedOperatorsBuilder.add(SqlStdOperatorTable.ACOS); // Arc cosine.
        definedOperatorsBuilder.add(SqlStdOperatorTable.ASIN); // Arc sine.
        definedOperatorsBuilder.add(SqlStdOperatorTable.ATAN); // Arc tangent.
        definedOperatorsBuilder.add(SqlStdOperatorTable.ATAN2); // Angle from coordinates.
        definedOperatorsBuilder.add(SqlStdOperatorTable.SQRT); // Square root.
        definedOperatorsBuilder.add(SqlStdOperatorTable.CBRT); // Cube root.
        definedOperatorsBuilder.add(SqlStdOperatorTable.COS); // Cosine
        definedOperatorsBuilder.add(SqlLibraryOperators.COSH); // Hyperbolic cosine.
        definedOperatorsBuilder.add(SqlStdOperatorTable.COT); // Cotangent.
        definedOperatorsBuilder.add(SqlStdOperatorTable.DEGREES); // Radians to degrees.
        definedOperatorsBuilder.add(SqlStdOperatorTable.RADIANS); // Degrees to radians.
        definedOperatorsBuilder.add(ROUND); // Fixes return type scale.
        definedOperatorsBuilder.add(SqlStdOperatorTable.SIGN);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SIN); // Sine.
        definedOperatorsBuilder.add(SqlLibraryOperators.SINH); // Hyperbolic sine.
        definedOperatorsBuilder.add(SqlStdOperatorTable.TAN); // Tangent.
        definedOperatorsBuilder.add(SqlLibraryOperators.TANH); // Hyperbolic tangent.
        definedOperatorsBuilder.add(TRUNCATE); // Fixes return type scale.
        definedOperatorsBuilder.add(SqlStdOperatorTable.PI);

        // Date and time.
        definedOperatorsBuilder.add(SqlStdOperatorTable.DATETIME_PLUS);
        definedOperatorsBuilder.add(SqlStdOperatorTable.MINUS_DATE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.EXTRACT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.FLOOR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CEIL);
        definedOperatorsBuilder.add(SqlStdOperatorTable.TIMESTAMP_ADD);
        definedOperatorsBuilder.add(SqlStdOperatorTable.TIMESTAMP_DIFF);
        definedOperatorsBuilder.add(SqlStdOperatorTable.LAST_DAY);
        definedOperatorsBuilder.add(SqlLibraryOperators.DAYNAME);
        definedOperatorsBuilder.add(SqlLibraryOperators.MONTHNAME);
        definedOperatorsBuilder.add(SqlStdOperatorTable.DAYOFMONTH);
        definedOperatorsBuilder.add(SqlStdOperatorTable.DAYOFWEEK);
        definedOperatorsBuilder.add(SqlStdOperatorTable.DAYOFYEAR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.YEAR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.QUARTER);
        definedOperatorsBuilder.add(SqlStdOperatorTable.MONTH);
        definedOperatorsBuilder.add(SqlStdOperatorTable.WEEK);
        definedOperatorsBuilder.add(SqlStdOperatorTable.HOUR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.MINUTE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.SECOND);
        definedOperatorsBuilder.add(SqlLibraryOperators.TIMESTAMP_SECONDS); // Seconds since 1970-01-01 to timestamp.
        definedOperatorsBuilder.add(SqlLibraryOperators.TIMESTAMP_MILLIS); // Milliseconds since 1970-01-01 to timestamp.
        definedOperatorsBuilder.add(SqlLibraryOperators.TIMESTAMP_MICROS); // Microseconds since 1970-01-01 to timestamp.
        definedOperatorsBuilder.add(SqlLibraryOperators.UNIX_SECONDS); // Timestamp to seconds since 1970-01-01.
        definedOperatorsBuilder.add(SqlLibraryOperators.UNIX_MILLIS); // Timestamp to milliseconds since 1970-01-01.
        definedOperatorsBuilder.add(SqlLibraryOperators.UNIX_MICROS); // Timestamp to microseconds since 1970-01-01.
        definedOperatorsBuilder.add(SqlLibraryOperators.UNIX_DATE); // Date to days since 1970-01-01.
        definedOperatorsBuilder.add(SqlLibraryOperators.DATE_FROM_UNIX_DATE); // Days since 1970-01-01 to date.
        definedOperatorsBuilder.add(SqlLibraryOperators.DATE); // String to date.

        // POSIX REGEX.
        definedOperatorsBuilder.add(SqlStdOperatorTable.POSIX_REGEX_CASE_INSENSITIVE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.POSIX_REGEX_CASE_SENSITIVE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NEGATED_POSIX_REGEX_CASE_INSENSITIVE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NEGATED_POSIX_REGEX_CASE_SENSITIVE);
        definedOperatorsBuilder.add(SqlLibraryOperators.REGEXP_REPLACE_2);
        definedOperatorsBuilder.add(SqlLibraryOperators.REGEXP_REPLACE_3);
        definedOperatorsBuilder.add(SqlLibraryOperators.REGEXP_REPLACE_4);
        definedOperatorsBuilder.add(SqlLibraryOperators.REGEXP_REPLACE_5);
        definedOperatorsBuilder.add(SqlLibraryOperators.REGEXP_REPLACE_6);

        // Collections.
        definedOperatorsBuilder.add(SqlStdOperatorTable.MAP_VALUE_CONSTRUCTOR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.ARRAY_VALUE_CONSTRUCTOR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.ITEM);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CARDINALITY);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_EMPTY);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_EMPTY);
        // TODO https://issues.apache.org/jira/browse/IGNITE-19332
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MAP_QUERY);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.ARRAY_QUERY);

        // Multiset.
        // TODO https://issues.apache.org/jira/browse/IGNITE-15551
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_VALUE);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_QUERY);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.SLICE);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.ELEMENT);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.STRUCT_ACCESS);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MEMBER_OF);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.IS_A_SET);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_A_SET);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_INTERSECT_DISTINCT);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_INTERSECT);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_EXCEPT_DISTINCT);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_EXCEPT);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_UNION_DISTINCT);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.MULTISET_UNION);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.SUBMULTISET_OF);
        // definedOperatorsBuilder.add(SqlStdOperatorTable.NOT_SUBMULTISET_OF);

        // Other functions and operators.
        definedOperatorsBuilder.add(SqlStdOperatorTable.ROW);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CAST);
        definedOperatorsBuilder.add(SqlLibraryOperators.INFIX_CAST);
        definedOperatorsBuilder.add(SqlStdOperatorTable.COALESCE);
        definedOperatorsBuilder.add(SqlLibraryOperators.NVL);
        definedOperatorsBuilder.add(SqlStdOperatorTable.NULLIF);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CASE);
        definedOperatorsBuilder.add(SqlLibraryOperators.DECODE);
        definedOperatorsBuilder.add(SqlLibraryOperators.LEAST);
        definedOperatorsBuilder.add(SqlLibraryOperators.GREATEST);
        definedOperatorsBuilder.add(SqlLibraryOperators.COMPRESS);
        definedOperatorsBuilder.add(OCTET_LENGTH);
        definedOperatorsBuilder.add(SqlStdOperatorTable.DEFAULT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.REINTERPRET);

        // XML Operators.
        definedOperatorsBuilder.add(SqlLibraryOperators.EXTRACT_VALUE);
        definedOperatorsBuilder.add(SqlLibraryOperators.XML_TRANSFORM);
        definedOperatorsBuilder.add(SqlLibraryOperators.EXTRACT_XML);
        definedOperatorsBuilder.add(SqlLibraryOperators.EXISTS_NODE);

        // JSON Operators
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_TYPE_OPERATOR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_VALUE_EXPRESSION);
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_VALUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_QUERY);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_TYPE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_EXISTS);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_DEPTH);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_KEYS);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_PRETTY);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_LENGTH);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_REMOVE);
        definedOperatorsBuilder.add(SqlLibraryOperators.JSON_STORAGE_SIZE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_OBJECT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.JSON_ARRAY);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_JSON_VALUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_JSON_OBJECT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_JSON_ARRAY);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_JSON_SCALAR);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_JSON_VALUE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_JSON_OBJECT);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_JSON_ARRAY);
        definedOperatorsBuilder.add(SqlStdOperatorTable.IS_NOT_JSON_SCALAR);

        // Aggregate functions.
        definedOperatorsBuilder.add(SqlInternalOperators.LITERAL_AGG);

        // Current time functions.
        definedOperatorsBuilder.add(CURRENT_TIMESTAMP);
        definedOperatorsBuilder.add(SqlStdOperatorTable.CURRENT_DATE);
        definedOperatorsBuilder.add(SqlStdOperatorTable.LOCALTIME);
        definedOperatorsBuilder.add(SqlStdOperatorTable.LOCALTIMESTAMP);

        // Context variable functions
        definedOperatorsBuilder.add(SqlStdOperatorTable.CURRENT_USER);

        // Ignite specific operators
        definedOperatorsBuilder.add(LENGTH);
        definedOperatorsBuilder.add(SYSTEM_RANGE);
        definedOperatorsBuilder.add(TYPEOF);
        definedOperatorsBuilder.add(LEAST2);
        definedOperatorsBuilder.add(GREATEST2);
        definedOperatorsBuilder.add(RAND_UUID);

        setOperators(buildIndex(definedOperatorsBuilder.build()));
    }

    /** Sets scale to {@code 0} for single argument variants of ROUND/TRUNCATE operators. */
    private static class SetScaleToZeroIfSingleArgument implements SqlReturnTypeInference {
        @Override
        public @Nullable RelDataType inferReturnType(SqlOperatorBinding opBinding) {
            RelDataType operandType = opBinding.getOperandType(0);

            // If there is only one argument and it supports precision and scale, set scale 0.
            if (opBinding.getOperandCount() == 1 && operandType.getSqlTypeName().allowsPrecScale(true, true)) {
                int precision = operandType.getPrecision();
                IgniteTypeFactory typeFactory = Commons.typeFactory();
                RelDataType returnType = typeFactory.createSqlType(operandType.getSqlTypeName(), precision, 0);

                // Preserve nullability
                boolean nullable = operandType.isNullable();

                return typeFactory.createTypeWithNullability(returnType, nullable);
            } else {
                // Two-argument form (or non prec/scale type): keep the operand's type.
                return operandType;
            }
        }
    }
}
apache/ozone
37,741
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.tools.contract; import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile; import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset; import static org.apache.hadoop.fs.contract.ContractTestUtils.skip; import static org.apache.hadoop.fs.contract.ContractTestUtils.verifyFileContents; import static org.apache.hadoop.fs.contract.ContractTestUtils.verifyPathExists; import static org.apache.hadoop.fs.statistics.IOStatisticsLogging.logIOStatisticsAtLevel; import static org.apache.hadoop.tools.DistCpConstants.CONF_LABEL_DISTCP_JOB_ID; import static org.assertj.core.api.Assertions.assertThat; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; import org.apache.hadoop.fs.contract.ContractTestUtils; import 
org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.tools.CopyListingFileStatus;
import org.apache.hadoop.tools.DistCp;
import org.apache.hadoop.tools.DistCpConstants;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.SimpleCopyListing;
import org.apache.hadoop.tools.mapred.CopyMapper;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.functional.RemoteIterators;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Contract test suite covering a file system's integration with DistCp. The
 * tests coordinate two file system instances: one "local", which is the local
 * file system, and the other "remote", which is the file system implementation
 * under test. The tests in the suite cover both copying from local to remote
 * (e.g. a backup use case) and copying from remote to local (e.g. a restore use
 * case).
 * The HDFS contract test needs to be run explicitly.
 */
public abstract class AbstractContractDistCpTest
    extends AbstractFSContractTestBase {

  private static final Logger LOG =
      LoggerFactory.getLogger(AbstractContractDistCpTest.class);

  /** Using offset to change modification time in tests. */
  private static final long MODIFICATION_TIME_OFFSET = 10000;

  // System property naming the per-run distcp file size for scale tests.
  public static final String SCALE_TEST_DISTCP_FILE_SIZE_KB
      = "scale.test.distcp.file.size.kb";

  public static final int DEFAULT_DISTCP_SIZE_KB = 1024;

  protected static final int MB = 1024 * 1024;

  /**
   * Default depth for a directory tree: {@value}.
   */
  protected static final int DEFAULT_DEPTH = 3;

  /**
   * Default width for a directory tree: {@value}.
   * Total dir size is
   * <pre>
   *   DEFAULT_WITH^DEFAULT_DEPTH
   * </pre>
   * So the duration of a test run grows rapidly with this value.
   * This has very significant consequences for object storage runs.
   */
  protected static final int DEFAULT_WIDTH = 2;

  private Configuration conf;
  private FileSystem localFS, remoteFS;
  private Path localDir, remoteDir;

  private Path inputDir;

  private Path inputSubDir1;

  private Path inputSubDir2;

  private Path inputSubDir4;

  private Path inputFile1;

  private Path inputFile2;

  private Path inputFile3;

  private Path inputFile4;

  private Path inputFile5;

  private Path outputSubDir2;

  private Path outputSubDir4;

  private Path outputFile1;

  private Path outputFile2;

  private Path outputFile3;

  private Path outputFile4;

  private Path inputDirUnderOutputDir;

  /**
   * The timeout value is extended over the default so that large updates
   * are allowed to take time, especially to remote stores.
   * @return the current test timeout
   */
  @Override
  protected int getTestTimeoutMillis() {
    return 15 * 60 * 1000;
  }

  @Override
  protected Configuration createConfiguration() {
    // Force the local job tracker so DistCp runs in-process.
    Configuration newConf = new Configuration();
    newConf.set("mapred.job.tracker", "local");
    return newConf;
  }

  @BeforeEach
  @Override
  public void setup() throws Exception {
    super.setup();
    conf = getContract().getConf();
    localFS = FileSystem.getLocal(conf);
    remoteFS = getFileSystem();
    // Test paths are isolated by concrete subclass name and test method name.
    // All paths are fully qualified including scheme (not taking advantage of
    // default file system), so if something fails, the messages will make it
    // clear which paths are local and which paths are remote.
    String className = getClass().getSimpleName();
    String testSubDir = className + "/" + getMethodName();
    localDir =
        localFS.makeQualified(new Path(new Path(
        GenericTestUtils.getTestDir().toURI()), testSubDir + "/local"));
    localFS.delete(localDir, true);
    mkdirs(localFS, localDir);
    Path testSubPath = path(testSubDir);
    remoteDir = new Path(testSubPath, "remote");
    // test teardown does this, but IDE-based test debugging can skip
    // that teardown; this guarantees the initial state is clean
    remoteFS.delete(remoteDir, true);
  }

  @AfterEach
  @Override
  public void teardown() throws Exception {
    // if remote FS supports IOStatistics log it.
    logIOStatisticsAtLevel(LOG, "info", getRemoteFS());
    super.teardown();
  }

  /**
   * Set up both input and output fields.
   * @param src source tree
   * @param dest dest tree
   */
  protected void initPathFields(final Path src, final Path dest) {
    initInputFields(src);
    initOutputFields(dest);
  }

  /**
   * Output field setup.
   * @param path path to set up
   */
  protected void initOutputFields(final Path path) {
    outputPath = path;  // only valid after setup()
    Path outputDir = new Path(path, "outputDir");
    inputDirUnderOutputDir = new Path(outputDir, "inputDir");
    outputFile1 = new Path(inputDirUnderOutputDir, "file1");
    Path outputSubDir1 = new Path(inputDirUnderOutputDir, "subDir1");
    outputFile2 = new Path(outputSubDir1, "file2");
    outputSubDir2 = new Path(inputDirUnderOutputDir, "subDir2/subDir2");
    outputFile3 = new Path(outputSubDir2, "file3");
    outputSubDir4 = new Path(inputDirUnderOutputDir, "subDir4/subDir4");
    outputFile4 = new Path(outputSubDir4, "file4");
  }

  /**
   * this path setup is used across different methods (copy, update, track)
   * so they are set up as fields.
   * @param srcDir source directory for these to go under.
*/ protected void initInputFields(final Path srcDir) { inputDir = new Path(srcDir, "inputDir"); inputFile1 = new Path(inputDir, "file1"); inputSubDir1 = new Path(inputDir, "subDir1"); inputFile2 = new Path(inputSubDir1, "file2"); inputSubDir2 = new Path(inputDir, "subDir2/subDir2"); inputFile3 = new Path(inputSubDir2, "file3"); inputSubDir4 = new Path(inputDir, "subDir4/subDir4"); inputFile4 = new Path(inputSubDir4, "file4"); inputFile5 = new Path(inputSubDir4, "file5"); } protected FileSystem getLocalFS() { return localFS; } protected FileSystem getRemoteFS() { return remoteFS; } protected Path getLocalDir() { return localDir; } protected Path getRemoteDir() { return remoteDir; } @Test public void testUpdateDeepDirectoryStructureToRemote() throws Exception { describe("update a deep directory structure from local to remote"); distCpDeepDirectoryStructure(localFS, localDir, remoteFS, remoteDir); distCpUpdateDeepDirectoryStructure(inputDirUnderOutputDir); } @Test public void testUpdateDeepDirectoryStructureNoChange() throws Exception { describe("update an unchanged directory structure" + " from local to remote; expect no copy"); Path target = distCpDeepDirectoryStructure(localFS, localDir, remoteFS, remoteDir); describe("\nExecuting Update\n"); Job job = distCpUpdate(localDir, target); assertCounterInRange(job, CopyMapper.Counter.SKIP, 1, -1); assertCounterInRange(job, CopyMapper.Counter.BYTESCOPIED, 0, 0); } /** * Assert that a counter is in a range; min and max values are inclusive. 
* @param job job to query * @param counter counter to examine * @param min min value, if negative "no minimum" * @param max max value, if negative "no maximum" * @throws IOException IO problem */ void assertCounterInRange(Job job, Enum<?> counter, long min, long max) throws IOException { Counter c = job.getCounters().findCounter(counter); long value = c.getValue(); String description = String.format("%s value %s", c.getDisplayName(), value, false); if (min >= 0) { assertThat(value) .withFailMessage(description + " too below minimum " + min) .isGreaterThanOrEqualTo(min); } if (max >= 0) { assertThat(value) .withFailMessage(description + " above maximum " + max) .isLessThanOrEqualTo(max); } } /** * Do a distcp from the local source to the destination filesystem. * This is executed as part of * {@link #testUpdateDeepDirectoryStructureToRemote()}; it's designed to be * overidden or wrapped by subclasses which wish to add more assertions. * * Life is complicated here by the way that the src/dest paths * on a distcp is different with -update. 
* @param destDir output directory used by the initial distcp * @return the distcp job */ protected Job distCpUpdateDeepDirectoryStructure(final Path destDir) throws Exception { describe("Now do an incremental update with deletion of missing files"); Path srcDir = inputDir; LOG.info("Source directory = {}, dest={}", srcDir, destDir); ContractTestUtils.assertPathsExist(localFS, "Paths for test are wrong", inputFile1, inputFile2, inputFile3, inputFile4, inputFile5); modifySourceDirectories(); Job job = distCpUpdate(srcDir, destDir); Path outputFileNew1 = new Path(outputSubDir2, "newfile1"); lsR("Updated Remote", remoteFS, destDir); ContractTestUtils.assertPathDoesNotExist(remoteFS, " deleted from " + inputFile1, outputFile1); ContractTestUtils.assertIsFile(remoteFS, outputFileNew1); ContractTestUtils.assertPathsDoNotExist(remoteFS, "DistCP should have deleted", outputFile3, outputFile4, outputSubDir4); assertCounterInRange(job, CopyMapper.Counter.COPY, 1, 1); assertCounterInRange(job, CopyMapper.Counter.SKIP, 1, -1); return job; } /** * Run distcp -update srcDir destDir. * @param srcDir local source directory * @param destDir remote destination directory. * @return the completed job * @throws Exception any failure. */ private Job distCpUpdate(final Path srcDir, final Path destDir) throws Exception { describe("\nDistcp -update from " + srcDir + " to " + destDir); lsR("Local to update", localFS, srcDir); lsR("Remote before update", remoteFS, destDir); return runDistCp(buildWithStandardOptions( new DistCpOptions.Builder( Collections.singletonList(srcDir), destDir) .withDeleteMissing(true) .withSyncFolder(true) .withSkipCRC(true) .withDirectWrite(shouldUseDirectWrite()) .withOverwrite(false))); } /** * Run distcp -update srcDir destDir. * @param srcDir local source directory * @param destDir remote destination directory. * @return the completed job * @throws Exception any failure. 
*/ private Job distCpUpdateWithFs(final Path srcDir, final Path destDir, FileSystem sourceFs, FileSystem targetFs) throws Exception { describe("\nDistcp -update from " + srcDir + " to " + destDir); lsR("Source Fs to update", sourceFs, srcDir); lsR("Target Fs before update", targetFs, destDir); return runDistCp(buildWithStandardOptions( new DistCpOptions.Builder( Collections.singletonList(srcDir), destDir) .withDeleteMissing(true) .withSyncFolder(true) .withSkipCRC(false) .withDirectWrite(shouldUseDirectWrite()) .withOverwrite(false))); } /** * Update the source directories as various tests expect, * including adding a new file. * @return the path to the newly created file * @throws IOException IO failure */ private Path modifySourceDirectories() throws IOException { localFS.delete(inputFile1, false); localFS.delete(inputFile3, false); // delete all of subdir4, so input/output file 4 & 5 will go localFS.delete(inputSubDir4, true); // add one new file Path inputFileNew1 = new Path(inputSubDir2, "newfile1"); ContractTestUtils.touch(localFS, inputFileNew1); return inputFileNew1; } @Test public void testTrackDeepDirectoryStructureToRemote() throws Exception { describe("copy a deep directory structure from local to remote"); Path destDir = distCpDeepDirectoryStructure(localFS, localDir, remoteFS, remoteDir); ContractTestUtils.assertIsDirectory(remoteFS, destDir); describe("Now do an incremental update and save of missing files"); Path srcDir = inputDir; // same path setup as in deepDirectoryStructure() Path trackDir = new Path(localDir, "trackDir"); describe("\nDirectories\n"); lsR("Local to update", localFS, srcDir); lsR("Remote before update", remoteFS, destDir); ContractTestUtils.assertPathsExist(localFS, "Paths for test are wrong", inputFile2, inputFile3, inputFile4, inputFile5); Path inputFileNew1 = modifySourceDirectories(); // Distcp set to track but not delete runDistCp(buildWithStandardOptions( new DistCpOptions.Builder( Collections.singletonList(srcDir), 
inputDirUnderOutputDir) .withTrackMissing(trackDir) .withSyncFolder(true) .withDirectWrite(shouldUseDirectWrite()) .withOverwrite(false))); lsR("tracked update", remoteFS, destDir); // new file went over Path outputFileNew1 = new Path(outputSubDir2, "newfile1"); ContractTestUtils.assertIsFile(remoteFS, outputFileNew1); ContractTestUtils.assertPathExists(localFS, "tracking directory", trackDir); // now read in the listings Path sortedSourceListing = new Path(trackDir, DistCpConstants.SOURCE_SORTED_FILE); ContractTestUtils.assertIsFile(localFS, sortedSourceListing); Path sortedTargetListing = new Path(trackDir, DistCpConstants.TARGET_SORTED_FILE); ContractTestUtils.assertIsFile(localFS, sortedTargetListing); // deletion didn't happen ContractTestUtils.assertPathsExist(remoteFS, "DistCP should have retained", outputFile2, outputFile3, outputFile4, outputSubDir4); // now scan the table and see that things are there. Map<String, Path> sourceFiles = new HashMap<>(10); Map<String, Path> targetFiles = new HashMap<>(10); try (SequenceFile.Reader sourceReader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(sortedSourceListing)); SequenceFile.Reader targetReader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(sortedTargetListing))) { CopyListingFileStatus copyStatus = new CopyListingFileStatus(); Text name = new Text(); while (sourceReader.next(name, copyStatus)) { String key = name.toString(); Path path = copyStatus.getPath(); LOG.info("{}: {}", key, path); sourceFiles.put(key, path); } while (targetReader.next(name, copyStatus)) { String key = name.toString(); Path path = copyStatus.getPath(); LOG.info("{}: {}", key, path); targetFiles.put(name.toString(), copyStatus.getPath()); } } // look for the new file in both lists assertThat(sourceFiles) .withFailMessage("No " + outputFileNew1 + " in source listing") .containsValue(inputFileNew1); assertThat(targetFiles) .withFailMessage("No " + outputFileNew1 + " in target listing") .containsValue(outputFileNew1); 
assertThat(targetFiles) .withFailMessage("No " + outputSubDir4 + " in target listing") .containsValue(outputSubDir4); assertThat(sourceFiles) .withFailMessage("Found " + inputSubDir4 + " in source listing") .doesNotContainValue(inputSubDir4); } public void lsR(final String description, final FileSystem fs, final Path dir) throws IOException { RemoteIterator<LocatedFileStatus> files = fs.listFiles(dir, true); LOG.info("{}: {}:", description, dir); StringBuilder sb = new StringBuilder(); while (files.hasNext()) { LocatedFileStatus status = files.next(); sb.append(String.format(" %s; type=%s; length=%d", status.getPath(), status.isDirectory() ? "dir" : "file", status.getLen())); } LOG.info("{}", sb); } @Test public void largeFilesToRemote() throws Exception { describe("copy multiple large files from local to remote"); largeFiles(localFS, localDir, remoteFS, remoteDir); } @Test public void testDeepDirectoryStructureFromRemote() throws Exception { describe("copy a deep directory structure from remote to local"); distCpDeepDirectoryStructure(remoteFS, remoteDir, localFS, localDir); } @Test public void testLargeFilesFromRemote() throws Exception { describe("copy multiple large files from remote to local"); largeFiles(remoteFS, remoteDir, localFS, localDir); } @Test public void testSetJobId() throws Exception { describe("check jobId is set in the conf"); remoteFS.create(new Path(remoteDir, "file1")).close(); assertRunDistCp(DistCpConstants.SUCCESS, remoteDir.toString(), localDir.toString(), getDefaultCLIOptionsOrNull(), conf); assertThat(conf.get(CONF_LABEL_DISTCP_JOB_ID)) .withFailMessage("DistCp job id isn't set") .isNotNull(); } /** * Executes a DistCp using a file system sub-tree with multiple nesting * levels. * The filenames are those of the fields initialized in setup. 
 *
 * @param srcFS source FileSystem
 * @param srcDir source directory
 * @param dstFS destination FileSystem
 * @param dstDir destination directory
 * @return the target directory of the copy
 * @throws Exception if there is a failure
 */
private Path distCpDeepDirectoryStructure(FileSystem srcFS,
    Path srcDir,
    FileSystem dstFS,
    Path dstDir) throws Exception {
  initPathFields(srcDir, dstDir);
  // Build the nested source tree; each file gets a distinct size so that
  // content verification below can tell them apart.
  mkdirs(srcFS, inputSubDir1);
  mkdirs(srcFS, inputSubDir2);
  byte[] data1 = dataset(100, 33, 43);
  createFile(srcFS, inputFile1, true, data1);
  byte[] data2 = dataset(200, 43, 53);
  createFile(srcFS, inputFile2, true, data2);
  byte[] data3 = dataset(300, 53, 63);
  createFile(srcFS, inputFile3, true, data3);
  createFile(srcFS, inputFile4, true, dataset(400, 53, 63));
  createFile(srcFS, inputFile5, true, dataset(500, 53, 63));
  Path target = new Path(dstDir, "outputDir");
  runDistCp(inputDir, target);
  ContractTestUtils.assertIsDirectory(dstFS, target);
  lsR("Destination tree after distcp", dstFS, target);
  // Spot-check one file per nesting level; files 4 and 5 are exercised by
  // the update/track tests instead.
  verifyFileContents(dstFS, new Path(target, "inputDir/file1"), data1);
  verifyFileContents(dstFS,
      new Path(target, "inputDir/subDir1/file2"), data2);
  verifyFileContents(dstFS,
      new Path(target, "inputDir/subDir2/subDir2/file3"), data3);
  return target;
}

/**
 * Executes a test using multiple large files.
* * @param srcFS source FileSystem * @param srcDir source directory * @param dstFS destination FileSystem * @param dstDir destination directory * @throws Exception if there is a failure */ private void largeFiles(FileSystem srcFS, Path srcDir, FileSystem dstFS, Path dstDir) throws Exception { int fileSizeKb = conf.getInt(SCALE_TEST_DISTCP_FILE_SIZE_KB, getDefaultDistCPSizeKb()); if (fileSizeKb < 1) { skip("File size in " + SCALE_TEST_DISTCP_FILE_SIZE_KB + " is zero"); } initPathFields(srcDir, dstDir); Path largeFile1 = new Path(inputDir, "file1"); Path largeFile2 = new Path(inputDir, "file2"); Path largeFile3 = new Path(inputDir, "file3"); int fileSizeMb = fileSizeKb / 1024; getLogger().info("{} with file size {}", getMethodName(), fileSizeMb); byte[] data1 = dataset((fileSizeMb + 1) * MB, 33, 43); createFile(srcFS, largeFile1, true, data1); byte[] data2 = dataset((fileSizeMb + 2) * MB, 43, 53); createFile(srcFS, largeFile2, true, data2); byte[] data3 = dataset((fileSizeMb + 3) * MB, 53, 63); createFile(srcFS, largeFile3, true, data3); Path target = new Path(dstDir, "outputDir"); runDistCp(inputDir, target); verifyFileContents(dstFS, new Path(target, "inputDir/file1"), data1); verifyFileContents(dstFS, new Path(target, "inputDir/file2"), data2); verifyFileContents(dstFS, new Path(target, "inputDir/file3"), data3); } /** * Override point. What is the default distcp size * for large files if not overridden by * {@link #SCALE_TEST_DISTCP_FILE_SIZE_KB}. * If 0 then, unless overridden in the configuration, * the large file tests will not run. * @return file size. */ protected int getDefaultDistCPSizeKb() { return DEFAULT_DISTCP_SIZE_KB; } /** * Executes DistCp and asserts that the job finished successfully. * The choice of direct/indirect is based on the value of * {@link #shouldUseDirectWrite()}. 
* @param src source path * @param dst destination path * @throws Exception if there is a failure */ private void runDistCp(Path src, Path dst) throws Exception { if (shouldUseDirectWrite()) { runDistCpDirectWrite(src, dst); } else { runDistCpWithRename(src, dst); } } /** * Run the distcp job. * @param options distcp options * @return the job. It will have already completed. * @throws Exception failure */ private Job runDistCp(final DistCpOptions options) throws Exception { Job job = new DistCp(conf, options).execute(); assertThat(job).withFailMessage("Unexpected null job returned from DistCp execution.") .isNotNull(); assertThat(job.isComplete()) .withFailMessage("DistCp job did not complete.") .isTrue(); assertThat(job.isSuccessful()) .withFailMessage("DistCp job did not complete successfully.") .isTrue(); return job; } /** * Add any standard options and then build. * @param builder DistCp option builder * @return the build options */ private DistCpOptions buildWithStandardOptions( DistCpOptions.Builder builder) { return builder .withNumListstatusThreads(DistCpOptions.MAX_NUM_LISTSTATUS_THREADS) .build(); } /** * Creates a directory and any ancestor directories required. 
* * @param fs FileSystem in which to create directories * @param dir path of directory to create * @throws Exception if there is a failure */ private static void mkdirs(FileSystem fs, Path dir) throws Exception { assertThat(fs.mkdirs(dir)) .withFailMessage("Failed to mkdir " + dir) .isTrue(); } @Test public void testDirectWrite() throws Exception { describe("copy file from local to remote using direct write option"); if (shouldUseDirectWrite()) { skip("not needed as all other tests use the -direct option."); } directWrite(localFS, localDir, remoteFS, remoteDir, true); } @Test public void testNonDirectWrite() throws Exception { describe("copy file from local to remote without using direct write " + "option"); directWrite(localFS, localDir, remoteFS, remoteDir, false); } @Test public void testDistCpWithIterator() throws Exception { describe("Build listing in distCp using the iterator option."); Path source = new Path(remoteDir, "src"); Path dest = new Path(localDir, "dest"); dest = localFS.makeQualified(dest); GenericTestUtils .createFiles(remoteFS, source, getDepth(), getWidth(), getWidth()); GenericTestUtils.LogCapturer log = GenericTestUtils.LogCapturer.captureLogs(SimpleCopyListing.LOG); String options = "-useiterator -update -delete" + getDefaultCLIOptions(); assertRunDistCp(DistCpConstants.SUCCESS, source.toString(), dest.toString(), options, conf); // Check the target listing was also done using iterator. assertThat(log.getOutput()).contains( "Building listing using iterator mode for " + dest); assertThat(RemoteIterators.toList(localFS.listFiles(dest, true))) .describedAs("files").hasSize(getTotalFiles()); } public int getDepth() { return DEFAULT_DEPTH; } public int getWidth() { return DEFAULT_WIDTH; } private int getTotalFiles() { int totalFiles = 0; for (int i = 1; i <= getDepth(); i++) { totalFiles += Math.pow(getWidth(), i); } return totalFiles; } /** * Override point: should direct write always be used? 
* false by default; enable for stores where rename is slow. * @return true if direct write should be used in all tests. */ protected boolean shouldUseDirectWrite() { return false; } /** * Return the default options for distcp, including, * if {@link #shouldUseDirectWrite()} is true, * the -direct option. * Append or prepend this to string CLIs. * @return default options. */ protected String getDefaultCLIOptions() { return shouldUseDirectWrite() ? " -direct " : ""; } /** * Return the default options for distcp, including, * if {@link #shouldUseDirectWrite()} is true, * the -direct option, null if there are no * defaults. * @return default options. */ protected String getDefaultCLIOptionsOrNull() { return shouldUseDirectWrite() ? " -direct " : null; } /** * Executes a test with support for using direct write option. * * @param srcFS source FileSystem * @param srcDir source directory * @param dstFS destination FileSystem * @param dstDir destination directory * @param directWrite whether to use -directwrite option * @throws Exception if there is a failure */ private void directWrite(FileSystem srcFS, Path srcDir, FileSystem dstFS, Path dstDir, boolean directWrite) throws Exception { initPathFields(srcDir, dstDir); // Create 2 test files mkdirs(srcFS, inputSubDir1); byte[] data1 = dataset(64, 33, 43); createFile(srcFS, inputFile1, true, data1); byte[] data2 = dataset(200, 43, 53); createFile(srcFS, inputFile2, true, data2); Path target = new Path(dstDir, "outputDir"); if (directWrite) { runDistCpDirectWrite(inputDir, target); } else { runDistCpWithRename(inputDir, target); } ContractTestUtils.assertIsDirectory(dstFS, target); lsR("Destination tree after distcp", dstFS, target); // Verify copied file contents verifyFileContents(dstFS, new Path(target, "inputDir/file1"), data1); verifyFileContents(dstFS, new Path(target, "inputDir/subDir1/file2"), data2); } /** * Run distcp -direct srcDir destDir. 
* @param srcDir local source directory * @param destDir remote destination directory * @return the completed job * @throws Exception any failure. */ private Job runDistCpDirectWrite(final Path srcDir, final Path destDir) throws Exception { describe("\nDistcp -direct from " + srcDir + " to " + destDir); return runDistCp(buildWithStandardOptions( new DistCpOptions.Builder( Collections.singletonList(srcDir), destDir) .withDirectWrite(true))); } /** * Run distcp srcDir destDir. * @param srcDir local source directory * @param destDir remote destination directory * @return the completed job * @throws Exception any failure. */ private Job runDistCpWithRename(Path srcDir, final Path destDir) throws Exception { describe("\nDistcp from " + srcDir + " to " + destDir); return runDistCp(buildWithStandardOptions( new DistCpOptions.Builder( Collections.singletonList(srcDir), destDir) .withDirectWrite(false))); } @Test public void testDistCpWithFile() throws Exception { describe("Distcp only file"); Path source = new Path(remoteDir, "file"); Path dest = new Path(localDir, "file"); dest = localFS.makeQualified(dest); mkdirs(localFS, localDir); int len = 4; int base = 0x40; byte[] block = dataset(len, base, base + len); createFile(remoteFS, source, true, block); verifyPathExists(remoteFS, "", source); verifyPathExists(localFS, "", localDir); assertRunDistCp(DistCpConstants.SUCCESS, source.toString(), dest.toString(), getDefaultCLIOptionsOrNull(), conf); assertThat(RemoteIterators.toList(localFS.listFiles(dest, true))) .describedAs("files").hasSize(1); verifyFileContents(localFS, dest, block); } @Test public void testDistCpWithUpdateExistFile() throws Exception { describe("Now update an existing file."); Path source = new Path(remoteDir, "file"); Path dest = new Path(localDir, "file"); dest = localFS.makeQualified(dest); int len = 4; int base = 0x40; byte[] block = dataset(len, base, base + len); byte[] destBlock = dataset(len, base, base + len + 1); createFile(remoteFS, source, 
true, block); createFile(localFS, dest, true, destBlock); verifyPathExists(remoteFS, "", source); verifyPathExists(localFS, "", dest); assertRunDistCp(DistCpConstants.SUCCESS, source.toString(), dest.toString(), "-delete -update" + getDefaultCLIOptions(), conf); assertThat(RemoteIterators.toList(localFS.listFiles(dest, true))) .hasSize(1); verifyFileContents(localFS, dest, block); } @Test public void testDistCpUpdateCheckFileSkip() throws Exception { describe("Distcp update to check file skips."); Path source = new Path(remoteDir, "file"); Path dest = new Path(localDir, "file"); Path source0byte = new Path(remoteDir, "file_0byte"); Path dest0byte = new Path(localDir, "file_0byte"); dest = localFS.makeQualified(dest); dest0byte = localFS.makeQualified(dest0byte); // Creating a source file with certain dataset. byte[] sourceBlock = dataset(10, 'a', 'z'); // Write the dataset. ContractTestUtils .writeDataset(remoteFS, source, sourceBlock, sourceBlock.length, 1024, true); // Create 0 byte source and target files. createFile(remoteFS, source0byte, true, new byte[0]); createFile(localFS, dest0byte, true, new byte[0]); // Execute the distcp -update job. Job job = distCpUpdateWithFs(remoteDir, localDir, remoteFS, localFS); // First distcp -update would normally copy the source to dest. verifyFileContents(localFS, dest, sourceBlock); // Verify 1 file was skipped in the distcp -update (The 0 byte file). // Verify 1 file was copied in the distcp -update (The new source file). verifySkipAndCopyCounter(job, 1, 1); // Remove the source file and replace with a file with same name and size // but different content. 
remoteFS.delete(source, false);
Path updatedSource = new Path(remoteDir, "file");
byte[] updatedSourceBlock = dataset(10, 'b', 'z');
ContractTestUtils.writeDataset(remoteFS, updatedSource,
    updatedSourceBlock, updatedSourceBlock.length, 1024, true);

// For testing purposes we would take the modification time of the
// updated Source file and add an offset or subtract the offset and set
// that time as the modification time for target file, this way we can
// ensure that our test can emulate a scenario where source is either more
// recently changed after -update so that copy takes place or target file
// is more recently changed which would skip the copying since the source
// has not been recently updated.
FileStatus fsSourceUpd = remoteFS.getFileStatus(updatedSource);
long modTimeSourceUpd = fsSourceUpd.getModificationTime();

// Add by an offset which would ensure enough gap for the test to
// not fail due to race conditions.
long newTargetModTimeNew = modTimeSourceUpd + MODIFICATION_TIME_OFFSET;
localFS.setTimes(dest, newTargetModTimeNew, -1);

// Execute the distcp -update job.
Job updatedSourceJobOldSrc =
    distCpUpdateWithFs(remoteDir, localDir, remoteFS, localFS);

// File contents should remain same since the mod time for target is
// newer than the updatedSource which indicates that the sync happened
// more recently and there is no update.
verifyFileContents(localFS, dest, sourceBlock);

// Skipped both 0 byte file and sourceFile (since mod time of target is
// older than the source it is perceived that source is of older version
// and we can skip it's copy).
verifySkipAndCopyCounter(updatedSourceJobOldSrc, 2, 0);

// Subtract by an offset which would ensure enough gap for the test to
// not fail due to race conditions.
// Fix: use Math.max, not Math.min. min(largePositive, 0) always yielded 0,
// discarding the computed offset; max clamps only a (theoretical) negative
// result to 0 while keeping the target genuinely older than the source.
long newTargetModTimeOld =
    Math.max(modTimeSourceUpd - MODIFICATION_TIME_OFFSET, 0);
localFS.setTimes(dest, newTargetModTimeOld, -1);

// Execute the distcp -update job.
Job updatedSourceJobNewSrc = distCpUpdateWithFs(remoteDir, localDir, remoteFS, localFS); // Verifying the target directory have both 0 byte file and the content // file. assertThat(RemoteIterators.toList(localFS.listFiles(localDir, true))) .hasSize(2); // Now the copy should take place and the file contents should change // since the mod time for target is older than the source file indicating // that there was an update to the source after the last sync took place. verifyFileContents(localFS, dest, updatedSourceBlock); // Verifying we skipped the 0 byte file and copied the updated source // file (since the modification time of the new source is older than the // target now). verifySkipAndCopyCounter(updatedSourceJobNewSrc, 1, 1); } /** * Method to check the skipped and copied counters of a distcp job. * * @param job job to check. * @param skipExpectedValue expected skip counter value. * @param copyExpectedValue expected copy counter value. * @throws IOException throw in case of failures. */ private void verifySkipAndCopyCounter(Job job, int skipExpectedValue, int copyExpectedValue) throws IOException { // get the skip and copy counters from the job. long skipActualValue = job.getCounters() .findCounter(CopyMapper.Counter.SKIP).getValue(); long copyActualValue = job.getCounters() .findCounter(CopyMapper.Counter.COPY).getValue(); // Verify if the actual values equals the expected ones. assertThat(copyActualValue) .withFailMessage("Mismatch in COPY counter value") .isEqualTo(copyExpectedValue); assertThat(skipActualValue) .withFailMessage("Mismatch in SKIP counter value") .isEqualTo(skipExpectedValue); } /** * Runs distcp from src to dst, preserving XAttrs. Asserts the * expected exit code. 
* * @param exitCode expected exit code * @param src distcp src path * @param dst distcp destination * @param options distcp command line options * @param conf Configuration to use * @throws Exception if there is any error */ public static void assertRunDistCp(int exitCode, String src, String dst, String options, Configuration conf) throws Exception { assertRunDistCp(exitCode, src, dst, options == null ? new String[0] : options.trim().split(" "), conf); } private static void assertRunDistCp(int exitCode, String src, String dst, String[] options, Configuration conf) throws Exception { DistCp distCp = new DistCp(conf, null); String[] optsArr = new String[options.length + 2]; System.arraycopy(options, 0, optsArr, 0, options.length); optsArr[optsArr.length - 2] = src; optsArr[optsArr.length - 1] = dst; Assertions.assertThat(ToolRunner.run(conf, distCp, optsArr)) .describedAs("Exit code of distcp %s", Arrays.stream(optsArr).collect(Collectors.joining(" "))) .isEqualTo(exitCode); } }
googleapis/google-cloud-java
37,910
java-billingbudgets/proto-google-cloud-billingbudgets-v1beta1/src/main/java/com/google/cloud/billing/budgets/v1beta1/UpdateBudgetRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/billing/budgets/v1beta1/budget_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.billing.budgets.v1beta1; /** * * * <pre> * Request for UpdateBudget * </pre> * * Protobuf type {@code google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest} */ public final class UpdateBudgetRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) UpdateBudgetRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateBudgetRequest.newBuilder() to construct. 
private UpdateBudgetRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateBudgetRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateBudgetRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.billing.budgets.v1beta1.BudgetServiceOuterClass .internal_static_google_cloud_billing_budgets_v1beta1_UpdateBudgetRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.billing.budgets.v1beta1.BudgetServiceOuterClass .internal_static_google_cloud_billing_budgets_v1beta1_UpdateBudgetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.class, com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.Builder.class); } private int bitField0_; public static final int BUDGET_FIELD_NUMBER = 1; private com.google.cloud.billing.budgets.v1beta1.Budget budget_; /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the budget field is set. */ @java.lang.Override public boolean hasBudget() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The budget. */ @java.lang.Override public com.google.cloud.billing.budgets.v1beta1.Budget getBudget() { return budget_ == null ? 
com.google.cloud.billing.budgets.v1beta1.Budget.getDefaultInstance() : budget_; } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.billing.budgets.v1beta1.BudgetOrBuilder getBudgetOrBuilder() { return budget_ == null ? com.google.cloud.billing.budgets.v1beta1.Budget.getDefaultInstance() : budget_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. 
Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getBudget()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getBudget()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest)) { return super.equals(obj); } com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest other = 
(com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) obj; if (hasBudget() != other.hasBudget()) return false; if (hasBudget()) { if (!getBudget().equals(other.getBudget())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasBudget()) { hash = (37 * hash) + BUDGET_FIELD_NUMBER; hash = (53 * hash) + getBudget().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest 
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateBudget * </pre> * * Protobuf type {@code google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.billing.budgets.v1beta1.BudgetServiceOuterClass .internal_static_google_cloud_billing_budgets_v1beta1_UpdateBudgetRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.billing.budgets.v1beta1.BudgetServiceOuterClass .internal_static_google_cloud_billing_budgets_v1beta1_UpdateBudgetRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.class, com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.Builder.class); } // Construct using com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getBudgetFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; budget_ = null; if (budgetBuilder_ != null) { budgetBuilder_.dispose(); budgetBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.billing.budgets.v1beta1.BudgetServiceOuterClass .internal_static_google_cloud_billing_budgets_v1beta1_UpdateBudgetRequest_descriptor; } @java.lang.Override public com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest getDefaultInstanceForType() { return com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest build() { com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest buildPartial() { com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest result = new com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.budget_ = budgetBuilder_ == null ? 
budget_ : budgetBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) { return mergeFrom((com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest other) { if (other == com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest.getDefaultInstance()) return this; if (other.hasBudget()) { mergeBudget(other.getBudget()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getBudgetFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.billing.budgets.v1beta1.Budget budget_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.billing.budgets.v1beta1.Budget, com.google.cloud.billing.budgets.v1beta1.Budget.Builder, com.google.cloud.billing.budgets.v1beta1.BudgetOrBuilder> budgetBuilder_; /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the budget field is set. */ public boolean hasBudget() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The budget. 
*/ public com.google.cloud.billing.budgets.v1beta1.Budget getBudget() { if (budgetBuilder_ == null) { return budget_ == null ? com.google.cloud.billing.budgets.v1beta1.Budget.getDefaultInstance() : budget_; } else { return budgetBuilder_.getMessage(); } } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBudget(com.google.cloud.billing.budgets.v1beta1.Budget value) { if (budgetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } budget_ = value; } else { budgetBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBudget( com.google.cloud.billing.budgets.v1beta1.Budget.Builder builderForValue) { if (budgetBuilder_ == null) { budget_ = builderForValue.build(); } else { budgetBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. 
* </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeBudget(com.google.cloud.billing.budgets.v1beta1.Budget value) { if (budgetBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && budget_ != null && budget_ != com.google.cloud.billing.budgets.v1beta1.Budget.getDefaultInstance()) { getBudgetBuilder().mergeFrom(value); } else { budget_ = value; } } else { budgetBuilder_.mergeFrom(value); } if (budget_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearBudget() { bitField0_ = (bitField0_ & ~0x00000001); budget_ = null; if (budgetBuilder_ != null) { budgetBuilder_.dispose(); budgetBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.billing.budgets.v1beta1.Budget.Builder getBudgetBuilder() { bitField0_ |= 0x00000001; onChanged(); return getBudgetFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.billing.budgets.v1beta1.BudgetOrBuilder getBudgetOrBuilder() { if (budgetBuilder_ != null) { return budgetBuilder_.getMessageOrBuilder(); } else { return budget_ == null ? 
com.google.cloud.billing.budgets.v1beta1.Budget.getDefaultInstance() : budget_; } } /** * * * <pre> * Required. The updated budget object. * The budget to update is specified by the budget name in the budget. * </pre> * * <code> * .google.cloud.billing.budgets.v1beta1.Budget budget = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.billing.budgets.v1beta1.Budget, com.google.cloud.billing.budgets.v1beta1.Budget.Builder, com.google.cloud.billing.budgets.v1beta1.BudgetOrBuilder> getBudgetFieldBuilder() { if (budgetBuilder_ == null) { budgetBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.billing.budgets.v1beta1.Budget, com.google.cloud.billing.budgets.v1beta1.Budget.Builder, com.google.cloud.billing.budgets.v1beta1.BudgetOrBuilder>( getBudget(), getParentForChildren(), isClean()); budget_ = null; } return budgetBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. 
If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Indicates which fields in the provided budget to update. * Read-only fields (such as `name`) cannot be changed. If this is not * provided, then only fields with non-default values from the request are * updated. 
See * https://developers.google.com/protocol-buffers/docs/proto3#default for more * details about default values. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) } // @@protoc_insertion_point(class_scope:google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest) private static final com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest(); } public static com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateBudgetRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateBudgetRequest>() { @java.lang.Override public UpdateBudgetRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); 
try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateBudgetRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateBudgetRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.billing.budgets.v1beta1.UpdateBudgetRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/iotdb
38,012
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/service/metrics/WritingMetrics.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iotdb.db.service.metrics; import org.apache.iotdb.commons.consensus.DataRegionId; import org.apache.iotdb.commons.service.metric.MetricService; import org.apache.iotdb.commons.service.metric.enums.Metric; import org.apache.iotdb.commons.service.metric.enums.Tag; import org.apache.iotdb.db.storageengine.StorageEngine; import org.apache.iotdb.db.storageengine.dataregion.DataRegion; import org.apache.iotdb.db.storageengine.dataregion.flush.FlushManager; import org.apache.iotdb.db.storageengine.dataregion.wal.WALManager; import org.apache.iotdb.db.storageengine.dataregion.wal.checkpoint.CheckpointType; import org.apache.iotdb.db.storageengine.rescon.memory.SystemInfo; import org.apache.iotdb.metrics.AbstractMetricService; import org.apache.iotdb.metrics.impl.DoNothingMetricManager; import org.apache.iotdb.metrics.metricsets.IMetricSet; import org.apache.iotdb.metrics.type.Counter; import org.apache.iotdb.metrics.type.Gauge; import org.apache.iotdb.metrics.type.Histogram; import org.apache.iotdb.metrics.type.Timer; import org.apache.iotdb.metrics.utils.MetricLevel; import org.apache.iotdb.metrics.utils.MetricType; import java.util.Arrays; import java.util.Collections; import 
java.util.List;
import java.util.Objects;

/**
 * Metric set for the storage-engine write path: flush stage/sub-task timers, WAL buffer and
 * checkpoint costs, and per-data-region memtable bookkeeping.
 *
 * <p>Singleton; obtain via {@link #getInstance()}. Metrics are registered in
 * {@link #bindTo(AbstractMetricService)} and deregistered in
 * {@link #unbindFrom(AbstractMetricService)}. While unbound, all instruments are the
 * {@link DoNothingMetricManager} stand-ins so record methods are always safe to call.
 */
public class WritingMetrics implements IMetricSet {
  private static final WritingMetrics INSTANCE = new WritingMetrics();
  private static final WALManager WAL_MANAGER = WALManager.getInstance();

  private WritingMetrics() {
    // empty constructor (singleton)
  }

  // region flush overview metrics
  public static final String FLUSH_STAGE_SORT = "sort";
  public static final String FLUSH_STAGE_ENCODING = "encoding";
  public static final String FLUSH_STAGE_IO = "io";
  public static final String WRITE_PLAN_INDICES = "write_plan_indices";
  public static final String PENDING_TASK_NUM = "pending_task_num";
  public static final String PENDING_SUB_TASK_NUM = "pending_sub_task_num";

  private Timer flushStageSortTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer flushStageEncodingTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer flushStageIOTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer writePlanIndicesTimer = DoNothingMetricManager.DO_NOTHING_TIMER;

  /** Registers per-stage flush timers and the pending-flush-task auto gauges. */
  private void bindFlushMetrics(AbstractMetricService metricService) {
    flushStageSortTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            FLUSH_STAGE_SORT);
    flushStageEncodingTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            FLUSH_STAGE_ENCODING);
    flushStageIOTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            FLUSH_STAGE_IO);
    writePlanIndicesTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            WRITE_PLAN_INDICES);
    metricService.createAutoGauge(
        Metric.PENDING_FLUSH_TASK.toString(),
        MetricLevel.IMPORTANT,
        FlushManager.getInstance(),
        FlushManager::getNumberOfPendingTasks,
        Tag.TYPE.toString(),
        PENDING_TASK_NUM);
    metricService.createAutoGauge(
        Metric.PENDING_FLUSH_TASK.toString(),
        MetricLevel.IMPORTANT,
        FlushManager.getInstance(),
        FlushManager::getNumberOfPendingSubTasks,
        Tag.TYPE.toString(),
        PENDING_SUB_TASK_NUM);
  }

  private void unbindFlushMetrics(AbstractMetricService metricService) {
    flushStageSortTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    flushStageEncodingTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    flushStageIOTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    writePlanIndicesTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    Arrays.asList(FLUSH_STAGE_SORT, FLUSH_STAGE_ENCODING, FLUSH_STAGE_IO, WRITE_PLAN_INDICES)
        .forEach(
            stage ->
                metricService.remove(
                    MetricType.TIMER, Metric.FLUSH_COST.toString(), Tag.STAGE.toString(), stage));
    // BUGFIX: these auto gauges were registered with the TYPE tag key in bindFlushMetrics,
    // but were previously removed with Tag.NAME — the mismatched tag key addressed a
    // different metric identity, so the gauges were never actually deregistered.
    Arrays.asList(PENDING_TASK_NUM, PENDING_SUB_TASK_NUM)
        .forEach(
            type ->
                metricService.remove(
                    MetricType.AUTO_GAUGE,
                    Metric.PENDING_FLUSH_TASK.toString(),
                    Tag.TYPE.toString(),
                    type));
  }

  // endregion

  // region flush subtask metrics
  public static final String SORT_TASK = "sort_task";
  public static final String ENCODING_TASK = "encoding_task";
  public static final String IO_TASK = "io_task";

  private Timer sortTaskTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer encodingTaskTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer ioTaskTimer = DoNothingMetricManager.DO_NOTHING_TIMER;

  /** Registers one timer per flush sub-task type (sort / encoding / io). */
  private void bindFlushSubTaskMetrics(AbstractMetricService metricService) {
    sortTaskTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_SUB_TASK_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            SORT_TASK);
    encodingTaskTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_SUB_TASK_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            ENCODING_TASK);
    ioTaskTimer =
        metricService.getOrCreateTimer(
            Metric.FLUSH_SUB_TASK_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            IO_TASK);
  }

  private void unbindFlushSubTaskMetrics(AbstractMetricService metricService) {
    sortTaskTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    encodingTaskTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    ioTaskTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    Arrays.asList(SORT_TASK, ENCODING_TASK, IO_TASK)
        .forEach(
            type ->
                metricService.remove(
                    MetricType.TIMER,
                    Metric.FLUSH_SUB_TASK_COST.toString(),
                    Tag.TYPE.toString(),
                    type));
  }

  // endregion

  // region wal overview metrics
  public static final String WAL_NODES_NUM = "wal_nodes_num";
  public static final String USED_RATIO = "used_ratio";
  public static final String SERIALIZED_WAL_BUFFER_SIZE_BYTE = "serialized_wal_buffer_size";
  public static final String WROTE_WAL_BUFFER_SIZE_BYTE = "wrote_wal_buffer_size";
  public static final String WAL_COMPRESS_COST_NS = "wal_compress_cost";
  public static final String WAL_UNCOMPRESS_COST_NS = "wal_uncompress_cost";
  public static final String READ_WAL_BUFFER_SIZE_BYTE = "read_wal_buffer_size";
  public static final String READ_WAL_BUFFER_COST_NS = "read_wal_buffer_cost";
  public static final String WRITE_WAL_BUFFER_COST_NS = "write_wal_buffer_cost";
  public static final String ENTRIES_COUNT = "entries_count";
  public static final String WAL_ENTRY_NUM_FOR_ONE_TSFILE = "wal_entry_num_for_one_tsfile";
  public static final String WAL_QUEUE_CURRENT_MEM_COST = "wal_queue_current_mem_cost";
  public static final String WAL_QUEUE_MAX_MEM_COST = "wal_queue_max_mem_cost";

  private Histogram usedRatioHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram entriesCountHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram walEntryNumForOneTsFileHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram serializedWALBufferSizeHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram wroteWALBufferSizeHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram walCompressCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram walUncompressCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram readWALBufferSizeHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram readWALBufferCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Histogram writeWALBufferCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  private Gauge walQueueMaxMemSizeGauge = DoNothingMetricManager.DO_NOTHING_GAUGE;

  /** Registers WAL node/buffer histograms, the queue-memory gauges, and related auto gauges. */
  private void bindWALMetrics(AbstractMetricService metricService) {
    metricService.createAutoGauge(
        Metric.WAL_NODE_NUM.toString(),
        MetricLevel.IMPORTANT,
        WAL_MANAGER,
        WALManager::getWALNodesNum,
        Tag.NAME.toString(),
        WAL_NODES_NUM);
    usedRatioHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(), MetricLevel.IMPORTANT, Tag.NAME.toString(), USED_RATIO);
    entriesCountHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            ENTRIES_COUNT);
    walEntryNumForOneTsFileHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            WAL_ENTRY_NUM_FOR_ONE_TSFILE);
    serializedWALBufferSizeHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            SERIALIZED_WAL_BUFFER_SIZE_BYTE);
    wroteWALBufferSizeHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            WROTE_WAL_BUFFER_SIZE_BYTE);
    walCompressCostHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            WAL_COMPRESS_COST_NS);
    walUncompressCostHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            WAL_UNCOMPRESS_COST_NS);
    readWALBufferSizeHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            READ_WAL_BUFFER_SIZE_BYTE);
    readWALBufferCostHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            READ_WAL_BUFFER_COST_NS);
    writeWALBufferCostHistogram =
        metricService.getOrCreateHistogram(
            Metric.WAL_BUFFER.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            WRITE_WAL_BUFFER_COST_NS);
    walQueueMaxMemSizeGauge =
        metricService.getOrCreateGauge(
            Metric.WAL_QUEUE_MEM_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            WAL_QUEUE_MAX_MEM_COST);
    SystemInfo systemInfo = SystemInfo.getInstance();
    metricService.createAutoGauge(
        Metric.WAL_QUEUE_MEM_COST.toString(),
        MetricLevel.IMPORTANT,
        systemInfo,
        (s) -> s.getWalBufferQueueMemoryBlock().getUsedMemoryInBytes(),
        Tag.NAME.toString(),
        WAL_QUEUE_CURRENT_MEM_COST);
  }

  private void unbindWALMetrics(AbstractMetricService metricService) {
    metricService.remove(
        MetricType.AUTO_GAUGE, Metric.WAL_NODE_NUM.toString(), Tag.NAME.toString(), WAL_NODES_NUM);
    // Reset ALL WAL instruments to the do-nothing stand-ins. Previously only three of the
    // ten histograms (and not the gauge) were reset, so the stale instruments kept
    // receiving updates after unbind — inconsistent with the other unbind methods.
    usedRatioHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    entriesCountHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    walEntryNumForOneTsFileHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    serializedWALBufferSizeHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAAM_FIX;
    wroteWALBufferSizeHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    walCompressCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    walUncompressCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    readWALBufferSizeHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    readWALBufferCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    writeWALBufferCostHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
    walQueueMaxMemSizeGauge = DoNothingMetricManager.DO_NOTHING_GAUGE;
    Arrays.asList(
            USED_RATIO,
            ENTRIES_COUNT,
            SERIALIZED_WAL_BUFFER_SIZE_BYTE,
            WROTE_WAL_BUFFER_SIZE_BYTE,
            WAL_COMPRESS_COST_NS,
            WAL_UNCOMPRESS_COST_NS,
            READ_WAL_BUFFER_SIZE_BYTE,
            READ_WAL_BUFFER_COST_NS,
            WRITE_WAL_BUFFER_COST_NS)
        .forEach(
            name ->
                metricService.remove(
                    MetricType.HISTOGRAM,
                    Metric.WAL_BUFFER.toString(),
                    Tag.NAME.toString(),
                    name));
    metricService.remove(
        MetricType.AUTO_GAUGE,
        Metric.WAL_QUEUE_MEM_COST.toString(),
        Tag.NAME.toString(),
        WAL_QUEUE_CURRENT_MEM_COST);
    metricService.remove(
        MetricType.GAUGE,
        Metric.WAL_QUEUE_MEM_COST.toString(),
        Tag.NAME.toString(),
        WAL_QUEUE_MAX_MEM_COST);
  }

  // endregion

  // region wal cost metrics
  public static final String MAKE_CHECKPOINT = "make_checkpoint";
  public static final String SERIALIZE_WAL_ENTRY = "serialize_wal_entry";
  public static final String SERIALIZE_WAL_ENTRY_TOTAL = "serialize_wal_entry_total";
  public static final String SYNC_WAL_BUFFER = "sync_wal_buffer";
  public static final String SYNC = "sync";
  public static final String FSYNC = "fsync";

  private Timer globalMemoryTableInfoTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer createMemoryTableTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer flushMemoryTableTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer serializeWalEntryTotalTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer syncTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Timer fsyncTimer = DoNothingMetricManager.DO_NOTHING_TIMER;

  /** Registers WAL cost timers: checkpoint types, entry serialization, and sync/fsync. */
  private void bindWALCostMetrics(AbstractMetricService metricService) {
    globalMemoryTableInfoTimer =
        metricService.getOrCreateTimer(
            Metric.WAL_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            MAKE_CHECKPOINT,
            Tag.TYPE.toString(),
            CheckpointType.GLOBAL_MEMORY_TABLE_INFO.toString());
    createMemoryTableTimer =
        metricService.getOrCreateTimer(
            Metric.WAL_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            MAKE_CHECKPOINT,
            Tag.TYPE.toString(),
            CheckpointType.CREATE_MEMORY_TABLE.toString());
    flushMemoryTableTimer =
        metricService.getOrCreateTimer(
            Metric.WAL_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            MAKE_CHECKPOINT,
            Tag.TYPE.toString(),
            CheckpointType.FLUSH_MEMORY_TABLE.toString());
    serializeWalEntryTotalTimer =
        metricService.getOrCreateTimer(
            Metric.WAL_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            SERIALIZE_WAL_ENTRY,
            Tag.TYPE.toString(),
            SERIALIZE_WAL_ENTRY_TOTAL);
    syncTimer =
        metricService.getOrCreateTimer(
            Metric.WAL_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            SYNC_WAL_BUFFER,
            Tag.TYPE.toString(),
            SYNC);
    fsyncTimer =
        metricService.getOrCreateTimer(
            Metric.WAL_COST.toString(),
            MetricLevel.IMPORTANT,
            Tag.STAGE.toString(),
            SYNC_WAL_BUFFER,
            Tag.TYPE.toString(),
            FSYNC);
  }

  private void unbindWALCostMetrics(AbstractMetricService metricService) {
    globalMemoryTableInfoTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    createMemoryTableTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    flushMemoryTableTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    serializeWalEntryTotalTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    syncTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    fsyncTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
    Arrays.asList(
            CheckpointType.GLOBAL_MEMORY_TABLE_INFO.toString(),
            CheckpointType.CREATE_MEMORY_TABLE.toString(),
            CheckpointType.FLUSH_MEMORY_TABLE.toString())
        .forEach(
            type ->
                metricService.remove(
                    MetricType.TIMER,
                    Metric.WAL_COST.toString(),
                    Tag.STAGE.toString(),
                    MAKE_CHECKPOINT,
                    Tag.TYPE.toString(),
                    type));
    Collections.singletonList(SERIALIZE_WAL_ENTRY_TOTAL)
        .forEach(
            type ->
                metricService.remove(
                    MetricType.TIMER,
                    Metric.WAL_COST.toString(),
                    Tag.STAGE.toString(),
                    SERIALIZE_WAL_ENTRY,
                    Tag.TYPE.toString(),
                    type));
    Arrays.asList(SYNC, FSYNC)
        .forEach(
            type ->
                metricService.remove(
                    MetricType.TIMER,
                    Metric.WAL_COST.toString(),
                    Tag.STAGE.toString(),
                    SYNC_WAL_BUFFER,
                    Tag.TYPE.toString(),
                    type));
  }

  // endregion

  // region manage metrics
  public static final String MEM_TABLE_SIZE = "mem_table_size";
  public static final String POINTS_NUM = "total_points_num";
  public static final String SERIES_NUM = "series_num";
  public static final String AVG_SERIES_POINT_NUM = "avg_series_points_num";
  public static final String COMPRESSION_RATIO = "compression_ratio";
  public static final String EFFECTIVE_RATIO_INFO = "effective_ratio_info";
  public static final String OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_SNAPSHOT =
      "oldest_mem_table_ram_when_cause_snapshot";
  public static final String OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_FLUSH =
      "oldest_mem_table_ram_when_cause_flush";
  public static final String FLUSH_TSFILE_SIZE = "flush_tsfile_size";
  public static final String FLUSH_THRESHOLD = "flush_threshold";
  public static final String REJECT_THRESHOLD = "reject_threshold";
  public static final String TIMED_FLUSH_MEMTABLE_COUNT = "timed_flush_memtable_count";
  public static final String WAL_FLUSH_MEMTABLE_COUNT = "wal_flush_memtable_count";
  public static final String MANUAL_FLUSH_MEMTABLE_COUNT = "manual_flush_memtable_count";
  public static final String MEM_CONTROL_FLUSH_MEMTABLE_COUNT = "mem_control_flush_memtable_count";

  // Renamed from flushThreholdGauge / rejectThreholdGauge (typo); private, so no caller impact.
  private Gauge flushThresholdGauge = DoNothingMetricManager.DO_NOTHING_GAUGE;
  private Gauge rejectThresholdGauge = DoNothingMetricManager.DO_NOTHING_GAUGE;
  private Timer memtableLiveTimer = DoNothingMetricManager.DO_NOTHING_TIMER;
  private Counter walFlushMemtableCounter = DoNothingMetricManager.DO_NOTHING_COUNTER;
  private Counter timedFlushMemtableCounter = DoNothingMetricManager.DO_NOTHING_COUNTER;
  private Counter manualFlushMemtableCounter = DoNothingMetricManager.DO_NOTHING_COUNTER;
  private Counter memControlFlushMemtableCounter = DoNothingMetricManager.DO_NOTHING_COUNTER;
  private Histogram avgPointHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;

  /** Registers per-data-region metrics plus the memtable-threshold gauges and flush counters. */
  public void bindDataRegionMetrics() {
    List<DataRegion> allDataRegions = StorageEngine.getInstance().getAllDataRegions();
    List<DataRegionId> allDataRegionIds = StorageEngine.getInstance().getAllDataRegionIds();
    allDataRegions.forEach(this::createDataRegionMemoryCostMetrics);
    allDataRegionIds.forEach(this::createFlushingMemTableStatusMetrics);
    allDataRegionIds.forEach(this::createActiveMemtableCounterMetrics);
    createActiveTimePartitionCounterMetrics();
    walFlushMemtableCounter = createWalFlushMemTableCounterMetrics();
    timedFlushMemtableCounter = createTimedFlushMemTableCounterMetrics();
    manualFlushMemtableCounter = createManualFlushMemTableCounterMetrics();
    memControlFlushMemtableCounter = createMemControlFlushMemTableCounterMetrics();
    flushThresholdGauge =
        MetricService.getInstance()
            .getOrCreateGauge(
                Metric.MEMTABLE_THRESHOLD.toString(),
                MetricLevel.IMPORTANT,
                Tag.TYPE.toString(),
                FLUSH_THRESHOLD);
    rejectThresholdGauge =
        MetricService.getInstance()
            .getOrCreateGauge(
                Metric.MEMTABLE_THRESHOLD.toString(),
                MetricLevel.IMPORTANT,
                Tag.TYPE.toString(),
                REJECT_THRESHOLD);
    memtableLiveTimer =
        MetricService.getInstance()
            .getOrCreateTimer(Metric.MEMTABLE_LIVE_DURATION.toString(), MetricLevel.IMPORTANT);
  }

  public void unbindDataRegionMetrics() {
    List<DataRegionId> allDataRegionIds = StorageEngine.getInstance().getAllDataRegionIds();
    allDataRegionIds.forEach(
        dataRegionId -> {
          removeDataRegionMemoryCostMetrics(dataRegionId);
          removeFlushingMemTableStatusMetrics(dataRegionId);
          removeActiveMemtableCounterMetrics(dataRegionId);
        });
    removeActiveTimePartitionCounterMetrics();
    removeTimedFlushMemTableCounterMetrics();
    removeWalFlushMemTableCounterMetrics();
    removeManualFlushMemTableCounterMetrics();
    removeMemControlFlushMemTableCounterMetrics();
    MetricService.getInstance()
        .remove(
            MetricType.GAUGE,
            Metric.MEMTABLE_THRESHOLD.toString(),
            Tag.TYPE.toString(),
            FLUSH_THRESHOLD);
    MetricService.getInstance()
        .remove(
            MetricType.GAUGE,
            Metric.MEMTABLE_THRESHOLD.toString(),
            Tag.TYPE.toString(),
            REJECT_THRESHOLD);
    MetricService.getInstance().remove(MetricType.TIMER, Metric.MEMTABLE_LIVE_DURATION.toString());
  }

  /** Auto gauge tracking the memory cost of one data region, tagged by region id. */
  public void createDataRegionMemoryCostMetrics(DataRegion dataRegion) {
    DataRegionId dataRegionId = new DataRegionId(Integer.parseInt(dataRegion.getDataRegionId()));
    MetricService.getInstance()
        .createAutoGauge(
            Metric.DATA_REGION_MEM_COST.toString(),
            MetricLevel.IMPORTANT,
            dataRegion,
            DataRegion::getMemCost,
            Tag.REGION.toString(),
            dataRegionId.toString());
  }

  public void removeDataRegionMemoryCostMetrics(DataRegionId dataRegionId) {
    MetricService.getInstance()
        .remove(
            MetricType.AUTO_GAUGE,
            Metric.DATA_REGION_MEM_COST.toString(),
            Tag.REGION.toString(),
            dataRegionId.toString());
  }

  /** Per-WAL-node histograms: effective-info ratio and oldest-memtable RAM triggers. */
  public void createWALNodeInfoMetrics(String walNodeId) {
    Arrays.asList(
            EFFECTIVE_RATIO_INFO,
            OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_SNAPSHOT,
            OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_FLUSH)
        .forEach(
            name ->
                MetricService.getInstance()
                    .getOrCreateHistogram(
                        Metric.WAL_NODE_INFO.toString(),
                        MetricLevel.IMPORTANT,
                        Tag.NAME.toString(),
                        name,
                        Tag.TYPE.toString(),
                        walNodeId));
  }

  public void removeWALNodeInfoMetrics(String walNodeId) {
    Arrays.asList(
            EFFECTIVE_RATIO_INFO,
            OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_SNAPSHOT,
            OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_FLUSH)
        .forEach(
            name ->
                MetricService.getInstance()
                    .remove(
                        MetricType.HISTOGRAM,
                        Metric.WAL_NODE_INFO.toString(),
                        Tag.NAME.toString(),
                        name,
                        Tag.TYPE.toString(),
                        walNodeId));
  }

  /** Per-region flushing-memtable histograms; also caches the avg-points-per-series histogram. */
  public void createFlushingMemTableStatusMetrics(DataRegionId dataRegionId) {
    Arrays.asList(MEM_TABLE_SIZE, SERIES_NUM, POINTS_NUM, COMPRESSION_RATIO, FLUSH_TSFILE_SIZE)
        .forEach(
            name ->
                MetricService.getInstance()
                    .getOrCreateHistogram(
                        Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
                        MetricLevel.IMPORTANT,
                        Tag.NAME.toString(),
                        name,
                        Tag.REGION.toString(),
                        dataRegionId.toString()));
    avgPointHistogram =
        MetricService.getInstance()
            .getOrCreateHistogram(
                Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
                MetricLevel.IMPORTANT,
                Tag.NAME.toString(),
                AVG_SERIES_POINT_NUM,
                Tag.REGION.toString(),
                dataRegionId.toString());
  }

  public Counter createWalFlushMemTableCounterMetrics() {
    return MetricService.getInstance()
        .getOrCreateCounter(
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            WAL_FLUSH_MEMTABLE_COUNT);
  }

  public Counter createTimedFlushMemTableCounterMetrics() {
    return MetricService.getInstance()
        .getOrCreateCounter(
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            TIMED_FLUSH_MEMTABLE_COUNT);
  }

  public Counter createManualFlushMemTableCounterMetrics() {
    return MetricService.getInstance()
        .getOrCreateCounter(
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            MANUAL_FLUSH_MEMTABLE_COUNT);
  }

  public Counter createMemControlFlushMemTableCounterMetrics() {
    return MetricService.getInstance()
        .getOrCreateCounter(
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            MetricLevel.IMPORTANT,
            Tag.TYPE.toString(),
            MEM_CONTROL_FLUSH_MEMTABLE_COUNT);
  }

  public void createActiveMemtableCounterMetrics(DataRegionId dataRegionId) {
    MetricService.getInstance()
        .getOrCreateCounter(
            Metric.ACTIVE_MEMTABLE_COUNT.toString(),
            MetricLevel.IMPORTANT,
            Tag.REGION.toString(),
            dataRegionId.toString());
  }

  public void createActiveTimePartitionCounterMetrics() {
    MetricService.getInstance()
        .getOrCreateCounter(Metric.ACTIVE_TIME_PARTITION_COUNT.toString(), MetricLevel.IMPORTANT);
  }

  public void removeTimedFlushMemTableCounterMetrics() {
    MetricService.getInstance()
        .remove(
            MetricType.COUNTER,
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            Tag.TYPE.toString(),
            TIMED_FLUSH_MEMTABLE_COUNT);
  }

  public void removeWalFlushMemTableCounterMetrics() {
    MetricService.getInstance()
        .remove(
            MetricType.COUNTER,
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            Tag.TYPE.toString(),
            WAL_FLUSH_MEMTABLE_COUNT);
  }

  public void removeManualFlushMemTableCounterMetrics() {
    MetricService.getInstance()
        .remove(
            MetricType.COUNTER,
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            Tag.TYPE.toString(),
            MANUAL_FLUSH_MEMTABLE_COUNT);
  }

  public void removeMemControlFlushMemTableCounterMetrics() {
    MetricService.getInstance()
        .remove(
            MetricType.COUNTER,
            Metric.FLUSH_MEMTABLE_COUNT.toString(),
            Tag.TYPE.toString(),
            MEM_CONTROL_FLUSH_MEMTABLE_COUNT);
  }

  public void removeActiveMemtableCounterMetrics(DataRegionId dataRegionId) {
    MetricService.getInstance()
        .remove(
            MetricType.COUNTER,
            Metric.ACTIVE_MEMTABLE_COUNT.toString(),
            Tag.REGION.toString(),
            dataRegionId.toString());
  }

  public void removeActiveTimePartitionCounterMetrics() {
    MetricService.getInstance()
        .remove(MetricType.COUNTER, Metric.ACTIVE_TIME_PARTITION_COUNT.toString());
  }

  public void removeFlushingMemTableStatusMetrics(DataRegionId dataRegionId) {
    Arrays.asList(
            MEM_TABLE_SIZE,
            SERIES_NUM,
            POINTS_NUM,
            AVG_SERIES_POINT_NUM,
            COMPRESSION_RATIO,
            FLUSH_TSFILE_SIZE)
        .forEach(
            name ->
                MetricService.getInstance()
                    .remove(
                        MetricType.HISTOGRAM,
                        Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
                        Tag.NAME.toString(),
                        name,
                        Tag.REGION.toString(),
                        dataRegionId.toString()));
    avgPointHistogram = DoNothingMetricManager.DO_NOTHING_HISTOGRAM;
  }

  /** Records the effective-info ratio of a WAL node as a percentage (ratio * 100). */
  public void recordWALNodeEffectiveInfoRatio(String walNodeId, double ratio) {
    MetricService.getInstance()
        .histogram(
            (long) (ratio * 100),
            Metric.WAL_NODE_INFO.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            EFFECTIVE_RATIO_INFO,
            Tag.TYPE.toString(),
            walNodeId);
  }

  public void recordMemTableRamWhenCauseSnapshot(String walNodeId, long ram) {
    MetricService.getInstance()
        .histogram(
            ram,
            Metric.WAL_NODE_INFO.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_SNAPSHOT,
            Tag.TYPE.toString(),
            walNodeId);
  }

  public void recordMemTableRamWhenCauseFlush(String walNodeId, long ram) {
    MetricService.getInstance()
        .histogram(
            ram,
            Metric.WAL_NODE_INFO.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            OLDEST_MEM_TABLE_RAM_WHEN_CAUSE_FLUSH,
            Tag.TYPE.toString(),
            walNodeId);
  }

  /** Records the compression ratio of a flushing memtable as a percentage (ratio * 100). */
  public void recordTsFileCompressionRatioOfFlushingMemTable(
      String dataRegionId, double compressionRatio) {
    MetricService.getInstance()
        .histogram(
            (long) (compressionRatio * 100),
            Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            COMPRESSION_RATIO,
            Tag.REGION.toString(),
            new DataRegionId(Integer.parseInt(dataRegionId)).toString());
  }

  /**
   * Records size/series/points statistics for a flushing memtable. {@code storageGroup} is
   * expected to end with "-&lt;regionId&gt;"; if no region id can be parsed, nothing is recorded.
   */
  public void recordFlushingMemTableStatus(
      String storageGroup, long memSize, long seriesNum, long totalPointsNum, long avgSeriesNum) {
    DataRegionId dataRegionId = getDataRegionIdFromStorageGroupStr(storageGroup);
    if (dataRegionId == null) {
      return;
    }
    MetricService.getInstance()
        .histogram(
            memSize,
            Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            MEM_TABLE_SIZE,
            Tag.REGION.toString(),
            dataRegionId.toString());
    MetricService.getInstance()
        .histogram(
            seriesNum,
            Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            SERIES_NUM,
            Tag.REGION.toString(),
            dataRegionId.toString());
    MetricService.getInstance()
        .histogram(
            totalPointsNum,
            Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            POINTS_NUM,
            Tag.REGION.toString(),
            dataRegionId.toString());
    avgPointHistogram.update(avgSeriesNum);
  }

  public void recordFlushTsFileSize(String storageGroup, long size) {
    DataRegionId dataRegionId = getDataRegionIdFromStorageGroupStr(storageGroup);
    if (dataRegionId == null) {
      return;
    }
    MetricService.getInstance()
        .histogram(
            size,
            Metric.FLUSHING_MEM_TABLE_STATUS.toString(),
            MetricLevel.IMPORTANT,
            Tag.NAME.toString(),
            FLUSH_TSFILE_SIZE,
            Tag.REGION.toString(),
            dataRegionId.toString());
  }

  /**
   * Parses the region id from a storage-group string of the form "...-&lt;id&gt;".
   *
   * @return the {@link DataRegionId}, or {@code null} if the input is null or has no '-'
   */
  private DataRegionId getDataRegionIdFromStorageGroupStr(String storageGroup) {
    if (Objects.isNull(storageGroup)) {
      return null;
    }
    int idx = storageGroup.lastIndexOf('-');
    if (idx == -1) {
      return null;
    }
    String dataRegionIdStr = storageGroup.substring(idx + 1);
    return new DataRegionId(Integer.parseInt(dataRegionIdStr));
  }

  /** Dispatches a flush-stage cost (millis) to the matching timer; unknown stages are ignored. */
  public void recordFlushCost(String stage, long costTimeInMillis) {
    switch (stage) {
      case FLUSH_STAGE_SORT:
        flushStageSortTimer.updateMillis(costTimeInMillis);
        break;
      case FLUSH_STAGE_ENCODING:
        flushStageEncodingTimer.updateMillis(costTimeInMillis);
        break;
      case FLUSH_STAGE_IO:
        flushStageIOTimer.updateMillis(costTimeInMillis);
        break;
      case WRITE_PLAN_INDICES:
        writePlanIndicesTimer.updateMillis(costTimeInMillis);
        break;
      default:
        // do nothing
        break;
    }
  }

  public void recordFlushSubTaskCost(String subTaskType, long costTimeInMillis) {
    switch (subTaskType) {
      case SORT_TASK:
        sortTaskTimer.updateMillis(costTimeInMillis);
        break;
      case ENCODING_TASK:
        encodingTaskTimer.updateMillis(costTimeInMillis);
        break;
      case IO_TASK:
        ioTaskTimer.updateMillis(costTimeInMillis);
        break;
      default:
        // do nothing
        break;
    }
  }

  public void recordMakeCheckpointCost(CheckpointType type, long costTimeInNanos) {
    switch (type) {
      case GLOBAL_MEMORY_TABLE_INFO:
        globalMemoryTableInfoTimer.updateNanos(costTimeInNanos);
        break;
      case CREATE_MEMORY_TABLE:
        createMemoryTableTimer.updateNanos(costTimeInNanos);
        break;
      case FLUSH_MEMORY_TABLE:
        flushMemoryTableTimer.updateNanos(costTimeInNanos);
        break;
      default:
        // do nothing
        break;
    }
  }

  public void recordSerializeWALEntryTotalCost(long costTimeInNanos) {
    serializeWalEntryTotalTimer.updateNanos(costTimeInNanos);
  }

  public void recordCompressWALBufferCost(long costTimeInNanos) {
    walCompressCostHistogram.update(costTimeInNanos);
  }

  public void recordWroteWALBuffer(int serializedSize, int wroteSize, long wroteTimeCostInNanos) {
    serializedWALBufferSizeHistogram.update(serializedSize);
    wroteWALBufferSizeHistogram.update(wroteSize);
    writeWALBufferCostHistogram.update(wroteTimeCostInNanos);
  }

  public void recordWALUncompressCost(long costTimeInNanos) {
    walUncompressCostHistogram.update(costTimeInNanos);
  }

  public void recordWALRead(long size, long costTimeInNanos) {
    readWALBufferSizeHistogram.update(size);
    readWALBufferCostHistogram.update(costTimeInNanos);
  }

  /** Records WAL buffer sync cost; {@code forceFlag} selects fsync vs. plain sync. */
  public void recordSyncWALBufferCost(long costTimeInNanos, boolean forceFlag) {
    if (forceFlag) {
      // fsync mode
      fsyncTimer.updateNanos(costTimeInNanos);
    } else {
      // sync mode
      syncTimer.updateNanos(costTimeInNanos);
    }
  }

  public void recordWALBufferUsedRatio(double usedRatio) {
    usedRatioHistogram.update((long) (usedRatio * 100));
  }

  public void recordWALBufferEntriesCount(long count) {
    entriesCountHistogram.update(count);
  }

  public void recordWALEntryNumForOneTsFile(long count) {
    walEntryNumForOneTsFileHistogram.update(count);
  }

  public void recordWALQueueMaxMemorySize(long size) {
    walQueueMaxMemSizeGauge.set(size);
  }

  public void recordFlushThreshold(double flushThreshold) {
    flushThresholdGauge.set((long) flushThreshold);
  }

  public void recordRejectThreshold(double rejectThreshold) {
    rejectThresholdGauge.set((long) rejectThreshold);
  }

  public void recordMemTableLiveDuration(long durationMillis) {
    memtableLiveTimer.updateMillis(durationMillis);
  }

  public void recordTimedFlushMemTableCount(int number) {
    timedFlushMemtableCounter.inc(number);
  }

  public void recordWalFlushMemTableCount(int number) {
    walFlushMemtableCounter.inc(number);
  }

  public void recordManualFlushMemTableCount(int number) {
    manualFlushMemtableCounter.inc(number);
  }

  public void recordMemControlFlushMemTableCount(int number) {
    memControlFlushMemtableCounter.inc(number);
  }

  public void recordActiveMemTableCount(String dataRegionId, int number) {
    MetricService.getInstance()
        .count(
            number,
            Metric.ACTIVE_MEMTABLE_COUNT.toString(),
            MetricLevel.IMPORTANT,
            Tag.REGION.toString(),
            dataRegionId);
  }

  public void recordActiveTimePartitionCount(int number) {
    MetricService.getInstance()
        .count(number, Metric.ACTIVE_TIME_PARTITION_COUNT.toString(), MetricLevel.IMPORTANT);
  }

  // endregion

  @Override
  public void bindTo(AbstractMetricService metricService) {
    bindFlushMetrics(metricService);
    bindFlushSubTaskMetrics(metricService);
    bindWALMetrics(metricService);
    bindWALCostMetrics(metricService);
    bindDataRegionMetrics();
  }

  @Override
  public void unbindFrom(AbstractMetricService metricService) {
    unbindFlushMetrics(metricService);
    unbindFlushSubTaskMetrics(metricService);
    unbindWALMetrics(metricService);
    unbindWALCostMetrics(metricService);
    unbindDataRegionMetrics();
  }

  public static WritingMetrics getInstance() {
    return INSTANCE;
  }

  public Histogram getAvgPointHistogram() {
    return avgPointHistogram;
  }
}
google/nomulus
38,116
core/src/main/java/google/registry/model/domain/DomainBase.java
// Copyright 2017 The Nomulus Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package google.registry.model.domain; import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.emptyToNull; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.ImmutableSortedSet.toImmutableSortedSet; import static com.google.common.collect.Sets.difference; import static com.google.common.collect.Sets.intersection; import static google.registry.model.EppResourceUtils.projectResourceOntoBuilderAtTime; import static google.registry.model.EppResourceUtils.setAutomaticTransferSuccessProperties; import static google.registry.persistence.transaction.TransactionManagerFactory.tm; import static google.registry.util.CollectionUtils.forceEmptyToNull; import static google.registry.util.CollectionUtils.nullToEmpty; import static google.registry.util.CollectionUtils.nullToEmptyImmutableCopy; import static google.registry.util.CollectionUtils.union; import static google.registry.util.DateTimeUtils.END_OF_TIME; import static google.registry.util.DateTimeUtils.earliestOf; import static google.registry.util.DateTimeUtils.isBeforeOrAt; import static google.registry.util.DateTimeUtils.leapSafeAddYears; import static google.registry.util.DomainNameUtils.canonicalizeHostname; import static 
google.registry.util.DomainNameUtils.getTldFromDomainName; import static google.registry.util.PreconditionsUtils.checkArgumentNotNull; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.gson.annotations.Expose; import google.registry.flows.ResourceFlowUtils; import google.registry.model.EppResource; import google.registry.model.EppResource.ResourceWithTransferData; import google.registry.model.billing.BillingRecurrence; import google.registry.model.contact.Contact; import google.registry.model.domain.DesignatedContact.Type; import google.registry.model.domain.launch.LaunchNotice; import google.registry.model.domain.rgp.GracePeriodStatus; import google.registry.model.domain.secdns.DomainDsData; import google.registry.model.domain.token.AllocationToken; import google.registry.model.domain.token.AllocationToken.TokenType; import google.registry.model.eppcommon.StatusValue; import google.registry.model.host.Host; import google.registry.model.poll.PollMessage; import google.registry.model.poll.PollMessage.Autorenew; import google.registry.model.poll.PollMessage.OneTime; import google.registry.model.tld.Tld; import google.registry.model.transfer.DomainTransferData; import google.registry.model.transfer.TransferStatus; import google.registry.persistence.VKey; import google.registry.tldconfig.idn.IdnLabelValidator; import google.registry.tmch.LordnTaskUtils.LordnPhase; import google.registry.tmch.NordnUploadAction; import google.registry.util.CollectionUtils; import google.registry.util.DateTimeUtils; import jakarta.persistence.Access; import jakarta.persistence.AccessType; import jakarta.persistence.AttributeOverride; import jakarta.persistence.AttributeOverrides; import jakarta.persistence.Column; import jakarta.persistence.Embeddable; import jakarta.persistence.Embedded; import jakarta.persistence.EnumType; import 
jakarta.persistence.Enumerated;
import jakarta.persistence.Id;
import jakarta.persistence.MappedSuperclass;
import jakarta.persistence.Transient;
import java.util.HashSet;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.annotation.Nullable;
import org.hibernate.collection.spi.PersistentSet;
import org.joda.time.DateTime;
import org.joda.time.Interval;

/**
 * A persistable domain resource including mutable and non-mutable fields.
 *
 * <p>This class deliberately does not include an {@link Id} so that any foreign-keyed fields can
 * refer to the proper parent entity's ID, whether we're storing this in the DB itself or as part of
 * another entity.
 *
 * @see <a href="https://tools.ietf.org/html/rfc5731">RFC 5731</a>
 */
@MappedSuperclass
@Embeddable
@Access(AccessType.FIELD)
public class DomainBase extends EppResource
    implements ResourceWithTransferData<DomainTransferData> {

  /** The max number of years that a domain can be registered for, as set by ICANN policy. */
  public static final int MAX_REGISTRATION_YEARS = 10;

  /** Status values which prohibit DNS information from being published. */
  private static final ImmutableSet<StatusValue> DNS_PUBLISHING_PROHIBITED_STATUSES =
      ImmutableSet.of(
          StatusValue.CLIENT_HOLD,
          StatusValue.INACTIVE,
          StatusValue.PENDING_DELETE,
          StatusValue.SERVER_HOLD);

  /**
   * Fully qualified domain name (puny-coded), which serves as the foreign key for this domain.
   *
   * <p>This is only unique in the sense that for any given lifetime specified as the time range
   * from (creationTime, deletionTime) there can only be one domain in the database with this name.
   * However, there can be many domains with the same name and non-overlapping lifetimes.
   *
   * <p>Invariant: domainName == domainName.toLowerCase(Locale.ENGLISH)
   */
  @Expose String domainName;

  /** The top level domain this is under, de-normalized from {@link #domainName}. */
  String tld;

  /** References to hosts that are the nameservers for the domain. */
  @Expose @Transient Set<VKey<Host>> nsHosts;

  /** Contacts. */
  @Expose @Nullable VKey<Contact> adminContact;

  @Expose @Nullable VKey<Contact> billingContact;

  @Expose @Nullable VKey<Contact> techContact;

  @Expose @Nullable VKey<Contact> registrantContact;

  /** Authorization info (aka transfer secret) of the domain. */
  @Embedded
  @AttributeOverrides({
    @AttributeOverride(name = "pw.value", column = @Column(name = "auth_info_value")),
    @AttributeOverride(name = "pw.repoId", column = @Column(name = "auth_info_repo_id")),
  })
  @Nullable
  DomainAuthInfo authInfo;

  /** Data used to construct DS records for this domain. */
  @Transient Set<DomainDsData> dsData;

  /**
   * The claims notice supplied when this domain was created, if there was one.
   *
   * <p>It's {@literal @}XmlTransient because it's not returned in an info response.
   */
  @Embedded
  @AttributeOverrides({
    @AttributeOverride(name = "noticeId.tcnId", column = @Column(name = "launch_notice_tcn_id")),
    @AttributeOverride(
        name = "noticeId.validatorId",
        column = @Column(name = "launch_notice_validator_id")),
    @AttributeOverride(
        name = "expirationTime",
        column = @Column(name = "launch_notice_expiration_time")),
    @AttributeOverride(
        name = "acceptedTime",
        column = @Column(name = "launch_notice_accepted_time")),
  })
  LaunchNotice launchNotice;

  /**
   * Name of first IDN table associated with TLD that matched the characters in this domain label.
   *
   * @see IdnLabelValidator#findValidIdnTableForTld
   */
  String idnTableName;

  /** Fully qualified host names of this domain's active subordinate hosts. */
  @Expose Set<String> subordinateHosts;

  /** When this domain's registration will expire. */
  @Expose DateTime registrationExpirationTime;

  /**
   * The poll message associated with this domain being deleted.
   *
   * <p>This field should be null if the domain is not in pending delete. If it is, the field should
   * refer to a {@link PollMessage} timed to when the domain is fully deleted. If the domain is
   * restored, the message should be deleted.
   */
  @Column(name = "deletion_poll_message_id")
  VKey<OneTime> deletePollMessage;

  /**
   * The recurring billing event associated with this domain's autorenewals.
   *
   * <p>The recurrence should be open-ended unless the domain is in pending delete or fully deleted,
   * in which case it should be closed at the time the delete was requested. Whenever the domain's
   * {@link #registrationExpirationTime} is changed the recurrence should be closed, a new one
   * should be created, and this field should be updated to point to the new one.
   */
  @Column(name = "billing_recurrence_id")
  VKey<BillingRecurrence> autorenewBillingEvent;

  /**
   * The recurring poll message associated with this domain's autorenewals.
   *
   * <p>The recurrence should be open-ended unless the domain is in pending delete or fully deleted,
   * in which case it should be closed at the time the delete was requested. Whenever the domain's
   * {@link #registrationExpirationTime} is changed the recurrence should be closed, a new one
   * should be created, and this field should be updated to point to the new one.
   */
  @Column(name = "autorenew_poll_message_id")
  VKey<Autorenew> autorenewPollMessage;

  /** The unexpired grace periods for this domain (some of which may not be active yet). */
  @Transient Set<GracePeriod> gracePeriods;

  /**
   * The id of the signed mark that was used to create this domain in sunrise.
   *
   * <p>Will only be populated for domains created in sunrise.
   */
  String smdId;

  /** Data about any pending or past transfers on this domain. */
  DomainTransferData transferData;

  /**
   * The time that this resource was last transferred.
   *
   * <p>Can be null if the resource has never been transferred.
   */
  @Expose DateTime lastTransferTime;

  /**
   * When the domain's autorenewal status will expire.
   *
   * <p>This will be {@link DateTimeUtils#END_OF_TIME} for the vast majority of domains because all
   * domains autorenew indefinitely by default and autorenew can only be countermanded by
   * administrators, typically for reasons of the URS process or termination of a registrar for
   * nonpayment.
   *
   * <p>When a domain is scheduled to not autorenew, this field is set to the current value of its
   * {@link #registrationExpirationTime}, after which point the next invocation of a periodic
   * cronjob will explicitly delete the domain. This field is a DateTime and not a boolean because
   * of edge cases that occur during the autorenew grace period. We need to be able to tell the
   * difference between domains that have reached their life and must be deleted now, and domains
   * that happen to be in the autorenew grace period now but should be deleted in roughly a year.
   */
  DateTime autorenewEndTime;

  /**
   * Which Lordn phase the domain is in after it is created but before the Nordn upload has
   * processed the domain.
   *
   * <p>This will almost always be {@code NONE} except in the interval between when a domain that
   * contains a signed mark or claims notice has been created, and when {@link NordnUploadAction}
   * runs, which includes the domain in the CSV uploaded to TMCH and sets this back to {@code NONE}.
   *
   * <p>Note that in the {@code DomainHistory} table this value means something slightly different:
   * It means that the given domain was created with a signed mark ({@code SUNRISE}) or a claims
   * notice ({@code CLAIMS}). Unlike on the {@code Domain} table, this value is not then
   * subsequently set back to {@code NONE} once the Nordn upload is complete; rather, it remains as
   * a permanent record of which phase the domain was in when created.
   */
  @Enumerated(EnumType.STRING)
  LordnPhase lordnPhase = LordnPhase.NONE;

  /**
   * The {@link AllocationToken} for the bulk pricing package this domain is currently a part of.
   */
  @Nullable
  @Column(name = "current_package_token")
  VKey<AllocationToken> currentBulkToken;

  public LordnPhase getLordnPhase() {
    return lordnPhase;
  }

  public ImmutableSet<String> getSubordinateHosts() {
    return nullToEmptyImmutableCopy(subordinateHosts);
  }

  public DateTime getRegistrationExpirationTime() {
    return registrationExpirationTime;
  }

  public VKey<OneTime> getDeletePollMessage() {
    return deletePollMessage;
  }

  public VKey<BillingRecurrence> getAutorenewBillingEvent() {
    return autorenewBillingEvent;
  }

  public VKey<Autorenew> getAutorenewPollMessage() {
    return autorenewPollMessage;
  }

  public ImmutableSet<GracePeriod> getGracePeriods() {
    return nullToEmptyImmutableCopy(gracePeriods);
  }

  public String getSmdId() {
    return smdId;
  }

  public Optional<VKey<AllocationToken>> getCurrentBulkToken() {
    return Optional.ofNullable(currentBulkToken);
  }

  /**
   * Returns the autorenew end time if there is one, otherwise empty.
   *
   * <p>Note that {@link DateTimeUtils#END_OF_TIME} is used as a sentinel value in the database
   * representation to signify that autorenew doesn't end, and is mapped to empty here for the
   * purposes of more legible business logic.
   */
  public Optional<DateTime> getAutorenewEndTime() {
    return Optional.ofNullable(autorenewEndTime.equals(END_OF_TIME) ? null : autorenewEndTime);
  }

  @Override
  public DomainTransferData getTransferData() {
    return Optional.ofNullable(transferData).orElse(DomainTransferData.EMPTY);
  }

  @Override
  public DateTime getLastTransferTime() {
    return lastTransferTime;
  }

  @Override
  public String getForeignKey() {
    return domainName;
  }

  public String getDomainName() {
    return domainName;
  }

  public ImmutableSet<DomainDsData> getDsData() {
    return nullToEmptyImmutableCopy(dsData);
  }

  public LaunchNotice getLaunchNotice() {
    return launchNotice;
  }

  public String getIdnTableName() {
    return idnTableName;
  }

  public ImmutableSet<VKey<Host>> getNameservers() {
    return nullToEmptyImmutableCopy(nsHosts);
  }

  // Hibernate needs this in order to populate nsHosts but no one else should ever use it
  @SuppressWarnings("unused")
  private void setNsHosts(Set<VKey<Host>> nsHosts) {
    this.nsHosts = forceEmptyToNull(nsHosts);
  }

  // Note: for the two methods below, how we wish to treat the Hibernate setters depends on the
  // current state of the object and what's passed in. The key principle is that we wish to maintain
  // the link between parent and child objects, meaning that we should keep around whichever of the
  // two sets (the parameter vs the class variable) and clear/populate that as appropriate.
  //
  // If the class variable is a PersistentSet, and we overwrite it here, Hibernate will throw
  // an exception "A collection with cascade=\"all-delete-orphan\" was no longer referenced by the
  // owning entity instance". See https://stackoverflow.com/questions/5587482 for more details.
  // Hibernate needs this in order to populate gracePeriods but no one else should ever use it
  @SuppressWarnings("unused")
  private void setInternalGracePeriods(Set<GracePeriod> gracePeriods) {
    if (this.gracePeriods instanceof PersistentSet) {
      // Mutate the Hibernate-managed collection in place instead of replacing the reference.
      Set<GracePeriod> nonNullGracePeriods = nullToEmpty(gracePeriods);
      this.gracePeriods.retainAll(nonNullGracePeriods);
      this.gracePeriods.addAll(nonNullGracePeriods);
    } else {
      this.gracePeriods = gracePeriods;
    }
  }

  // Hibernate needs this in order to populate dsData but no one else should ever use it
  @SuppressWarnings("unused")
  private void setInternalDelegationSignerData(Set<DomainDsData> dsData) {
    if (this.dsData instanceof PersistentSet) {
      // Same in-place mutation strategy as setInternalGracePeriods, for the same Hibernate reason.
      Set<DomainDsData> nonNullDsData = nullToEmpty(dsData);
      this.dsData.retainAll(nonNullDsData);
      this.dsData.addAll(nonNullDsData);
    } else {
      this.dsData = dsData;
    }
  }

  public final String getCurrentSponsorRegistrarId() {
    return getPersistedCurrentSponsorRegistrarId();
  }

  /** Returns true if DNS information should be published for the given domain. */
  public boolean shouldPublishToDns() {
    return intersection(getStatusValues(), DNS_PUBLISHING_PROHIBITED_STATUSES).isEmpty();
  }

  /**
   * Returns the Registry Grace Period Statuses for this domain.
   *
   * <p>This collects all statuses from the domain's {@link GracePeriod} entries and also adds the
   * PENDING_DELETE status if needed.
   */
  public ImmutableSet<GracePeriodStatus> getGracePeriodStatuses() {
    Set<GracePeriodStatus> gracePeriodStatuses = new HashSet<>();
    for (GracePeriod gracePeriod : getGracePeriods()) {
      gracePeriodStatuses.add(gracePeriod.getType());
    }
    if (getStatusValues().contains(StatusValue.PENDING_DELETE)
        && !gracePeriodStatuses.contains(GracePeriodStatus.REDEMPTION)) {
      gracePeriodStatuses.add(GracePeriodStatus.PENDING_DELETE);
    }
    return ImmutableSet.copyOf(gracePeriodStatuses);
  }

  /** Returns the subset of grace periods having the specified type. */
  public ImmutableSet<GracePeriod> getGracePeriodsOfType(GracePeriodStatus gracePeriodType) {
    ImmutableSet.Builder<GracePeriod> builder = new ImmutableSet.Builder<>();
    for (GracePeriod gracePeriod : getGracePeriods()) {
      if (gracePeriod.getType() == gracePeriodType) {
        builder.add(gracePeriod);
      }
    }
    return builder.build();
  }

  @Override
  public DomainBase cloneProjectedAtTime(final DateTime now) {
    return cloneDomainProjectedAtTime(this, now);
  }

  /**
   * The logic in this method, which handles implicit server approval of transfers, very closely
   * parallels the logic in {@code DomainTransferApproveFlow} which handles explicit client
   * approvals.
   */
  static <T extends DomainBase> T cloneDomainProjectedAtTime(T domain, DateTime now) {
    DomainTransferData transferData = domain.getTransferData();
    DateTime transferExpirationTime = transferData.getPendingTransferExpirationTime();

    // If there's a pending transfer that has expired, handle it.
    if (TransferStatus.PENDING.equals(transferData.getTransferStatus())
        && isBeforeOrAt(transferExpirationTime, now)) {
      // Project until just before the transfer time. This will handle the case of an autorenew
      // before the transfer was even requested or during the request period.
      // If the transfer time is precisely the moment that the domain expires, there will not be an
      // autorenew billing event (since we end the recurrence at transfer time and recurrences are
      // exclusive of their ending), and we can just proceed with the transfer.
      T domainAtTransferTime =
          cloneDomainProjectedAtTime(domain, transferExpirationTime.minusMillis(1));

      DateTime expirationDate = transferData.getTransferredRegistrationExpirationTime();
      if (expirationDate == null) {
        // Extend the registration by the correct number of years from the expiration time
        // that was current on the domain right before the transfer, capped at 10 years from
        // the moment of the transfer.
        expirationDate =
            ResourceFlowUtils.computeExDateForApprovalTime(
                domainAtTransferTime, transferExpirationTime, transferData.getTransferPeriod());
      }
      // If we are within an autorenew grace period, the transfer will subsume the autorenew. There
      // will already be a cancellation written in advance by the transfer request flow, so we don't
      // need to worry about billing, but we do need to cancel out the expiration time increase.
      // The transfer period saved in the transfer data will be one year, unless the superuser
      // extension set the transfer period to zero.
      // Set the expiration, autorenew events, and grace period for the transfer. (Transfer ends
      // all other graces).
      Builder builder =
          domainAtTransferTime
              .asBuilder()
              .setRegistrationExpirationTime(expirationDate)
              // Set the speculatively-written new autorenew events as the domain's autorenew
              // events.
              .setAutorenewBillingEvent(transferData.getServerApproveAutorenewEvent())
              .setAutorenewPollMessage(transferData.getServerApproveAutorenewPollMessage())
              .setCurrentBulkToken(null);
      if (transferData.getTransferPeriod().getValue() == 1) {
        // Set the grace period using a key to the pre-scheduled transfer billing event. Not using
        // GracePeriod.forBillingEvent() here in order to avoid the actual fetch.
        builder.setGracePeriods(
            ImmutableSet.of(
                GracePeriod.create(
                    GracePeriodStatus.TRANSFER,
                    domain.getRepoId(),
                    transferExpirationTime.plus(
                        Tld.get(domain.getTld()).getTransferGracePeriodLength()),
                    transferData.getGainingRegistrarId(),
                    transferData.getServerApproveBillingEvent())));
      } else {
        // There won't be a billing event, so we don't need a grace period
        builder.setGracePeriods(ImmutableSet.of());
      }
      // Set all remaining transfer properties.
      setAutomaticTransferSuccessProperties(builder, transferData);
      builder
          .setLastEppUpdateTime(transferExpirationTime)
          .setLastEppUpdateRegistrarId(transferData.getGainingRegistrarId());
      // Finish projecting to now.
      return (T) builder.build().cloneProjectedAtTime(now);
    }

    Optional<DateTime> newLastEppUpdateTime = Optional.empty();

    // There is no transfer. Do any necessary autorenews for active domains.
    Builder builder = domain.asBuilder();
    if (isBeforeOrAt(domain.getRegistrationExpirationTime(), now)
        && END_OF_TIME.equals(domain.getDeletionTime())) {
      // Autorenew by the number of years between the old expiration time and now.
      DateTime lastAutorenewTime =
          leapSafeAddYears(
              domain.getRegistrationExpirationTime(),
              new Interval(domain.getRegistrationExpirationTime(), now).toPeriod().getYears());
      DateTime newExpirationTime = lastAutorenewTime.plusYears(1);
      builder
          .setRegistrationExpirationTime(newExpirationTime)
          .addGracePeriod(
              GracePeriod.createForRecurrence(
                  GracePeriodStatus.AUTO_RENEW,
                  domain.getRepoId(),
                  lastAutorenewTime.plus(Tld.get(domain.getTld()).getAutoRenewGracePeriodLength()),
                  domain.getCurrentSponsorRegistrarId(),
                  domain.getAutorenewBillingEvent()));
      newLastEppUpdateTime = Optional.of(lastAutorenewTime);
    }

    // Remove any grace periods that have expired.
    T almostBuilt = (T) builder.build();
    builder = almostBuilt.asBuilder();
    for (GracePeriod gracePeriod : almostBuilt.getGracePeriods()) {
      if (isBeforeOrAt(gracePeriod.getExpirationTime(), now)) {
        builder.removeGracePeriod(gracePeriod);
        if (newLastEppUpdateTime.isEmpty()
            || isBeforeOrAt(newLastEppUpdateTime.get(), gracePeriod.getExpirationTime())) {
          newLastEppUpdateTime = Optional.of(gracePeriod.getExpirationTime());
        }
      }
    }

    // It is possible that the lastEppUpdateRegistrarId is different from current sponsor client
    // id, so we have to do the comparison instead of having one variable just storing the most
    // recent time.
    if (newLastEppUpdateTime.isPresent()) {
      if (domain.getLastEppUpdateTime() == null
          || newLastEppUpdateTime.get().isAfter(domain.getLastEppUpdateTime())) {
        builder
            .setLastEppUpdateTime(newLastEppUpdateTime.get())
            .setLastEppUpdateRegistrarId(domain.getCurrentSponsorRegistrarId());
      }
    }

    // Handle common properties like setting or unsetting linked status. This also handles the
    // general case of pending transfers for other resource types, but since we've always handled
    // a pending transfer by this point that's a no-op for domains.
    projectResourceOntoBuilderAtTime(almostBuilt, builder, now);
    return (T) builder.build();
  }

  /** Return what the expiration time would be if the given number of years were added to it. */
  public static DateTime extendRegistrationWithCap(
      DateTime now, DateTime currentExpirationTime, @Nullable Integer extendedRegistrationYears) {
    // We must cap registration at the max years (aka 10), even if that truncates the last year.
    return earliestOf(
        leapSafeAddYears(
            currentExpirationTime, Optional.ofNullable(extendedRegistrationYears).orElse(0)),
        leapSafeAddYears(now, MAX_REGISTRATION_YEARS));
  }

  /** Loads and returns the fully qualified host names of all linked nameservers. */
  public ImmutableSortedSet<String> loadNameserverHostNames() {
    return tm().reTransact(
            () ->
                tm().loadByKeys(getNameservers()).values().stream()
                    .map(Host::getHostName)
                    .collect(toImmutableSortedSet(Ordering.natural())));
  }

  /** A key to the registrant who registered this domain. */
  public Optional<VKey<Contact>> getRegistrant() {
    return Optional.ofNullable(registrantContact);
  }

  public Optional<VKey<Contact>> getAdminContact() {
    return Optional.ofNullable(adminContact);
  }

  public Optional<VKey<Contact>> getBillingContact() {
    return Optional.ofNullable(billingContact);
  }

  public Optional<VKey<Contact>> getTechContact() {
    return Optional.ofNullable(techContact);
  }

  /**
   * Associated contacts for the domain (other than registrant).
   *
   * <p>Note: This can be an empty set if no contacts are present for the domain.
   */
  public ImmutableSet<DesignatedContact> getContacts() {
    return getAllContacts(false);
  }

  /**
   * Gets all associated contacts for the domain, including the registrant.
   *
   * <p>Note: This can be an empty set if no contacts are present for the domain.
   */
  public ImmutableSet<DesignatedContact> getAllContacts() {
    return getAllContacts(true);
  }

  @Nullable
  public DomainAuthInfo getAuthInfo() {
    return authInfo;
  }

  /**
   * Returns all referenced contacts from this domain.
   *
   * <p>Note: This can be an empty set if no contacts are present for the domain.
   */
  public ImmutableSet<VKey<Contact>> getReferencedContacts() {
    return nullToEmptyImmutableCopy(getAllContacts(true)).stream()
        .map(DesignatedContact::getContactKey)
        .filter(Objects::nonNull)
        .collect(toImmutableSet());
  }

  private ImmutableSet<DesignatedContact> getAllContacts(boolean includeRegistrant) {
    ImmutableSet.Builder<DesignatedContact> builder = new ImmutableSet.Builder<>();
    if (includeRegistrant) {
      getRegistrant().ifPresent(c -> builder.add(DesignatedContact.create(Type.REGISTRANT, c)));
    }
    getAdminContact().ifPresent(c -> builder.add(DesignatedContact.create(Type.ADMIN, c)));
    getBillingContact().ifPresent(c -> builder.add(DesignatedContact.create(Type.BILLING, c)));
    getTechContact().ifPresent(c -> builder.add(DesignatedContact.create(Type.TECH, c)));
    return builder.build();
  }

  public String getTld() {
    return tld;
  }

  /**
   * Sets the individual contact fields from {@code contacts}.
   *
   * <p>The registrant field is only set if {@code includeRegistrant} is true, as this field needs
   * to be set in some circumstances but not in others.
   */
  void setContactFields(Set<DesignatedContact> contacts, boolean includeRegistrant) {
    // Set the individual contact fields.
    billingContact = null;
    techContact = null;
    adminContact = null;
    if (includeRegistrant) {
      registrantContact = null;
    }
    HashSet<Type> contactsDiscovered = new HashSet<>();
    for (DesignatedContact contact : contacts) {
      checkArgument(
          !contactsDiscovered.contains(contact.getType()),
          "Duplicate contact type %s in designated contact set.",
          contact.getType());
      contactsDiscovered.add(contact.getType());
      switch (contact.getType()) {
        case BILLING -> billingContact = contact.getContactKey();
        case TECH -> techContact = contact.getContactKey();
        case ADMIN -> adminContact = contact.getContactKey();
        case REGISTRANT -> {
          if (includeRegistrant) {
            registrantContact = contact.getContactKey();
          }
        }
        default ->
            throw new IllegalArgumentException(
                "Unknown contact resource type: " + contact.getType());
      }
    }
  }

  @Override
  public VKey<Domain> createVKey() {
    throw new UnsupportedOperationException(
        "DomainBase is not an actual persisted entity you can create a key to; use Domain instead");
  }

  /** Predicate to determine if a given {@link DesignatedContact} is the registrant. */
  static final Predicate<DesignatedContact> IS_REGISTRANT =
      (DesignatedContact contact) -> Type.REGISTRANT.equals(contact.type);

  /** An override of {@link EppResource#asBuilder} with tighter typing. */
  @Override
  public Builder<? extends DomainBase, ?> asBuilder() {
    return new Builder<>(clone(this));
  }

  /** A builder for constructing {@link Domain}, since it is immutable. */
  public static class Builder<T extends DomainBase, B extends Builder<T, B>>
      extends EppResource.Builder<T, B> implements BuilderWithTransferData<DomainTransferData, B> {

    public Builder() {}

    Builder(T instance) {
      super(instance);
    }

    @Override
    public T build() {
      T instance = getInstance();
      // If TransferData is totally empty, set it to null.
      if (DomainTransferData.EMPTY.equals(getInstance().transferData)) {
        setTransferData(null);
      }
      // A Domain has status INACTIVE if there are no nameservers.
      if (getInstance().getNameservers().isEmpty()) {
        addStatusValue(StatusValue.INACTIVE);
      } else {
        // There are nameservers, so make sure INACTIVE isn't there.
        removeStatusValue(StatusValue.INACTIVE);
      }
      // If there is no autorenew end time, set it to END_OF_TIME.
      instance.autorenewEndTime = firstNonNull(getInstance().autorenewEndTime, END_OF_TIME);
      checkArgumentNotNull(emptyToNull(instance.domainName), "Missing domainName");
      instance.tld = getTldFromDomainName(instance.domainName);
      T newDomain = super.build();
      // Hibernate throws exception if gracePeriods or dsData is null because we enabled all
      // cascadable operations and orphan removal.
      newDomain.gracePeriods =
          newDomain.gracePeriods == null ? ImmutableSet.of() : newDomain.gracePeriods;
      newDomain.dsData =
          newDomain.dsData == null
              ? ImmutableSet.of()
              : newDomain.dsData.stream()
                  .map(ds -> ds.cloneWithDomainRepoId(instance.getRepoId()))
                  .collect(toImmutableSet());
      return newDomain;
    }

    public B setDomainName(String domainName) {
      checkArgument(
          domainName.equals(canonicalizeHostname(domainName)),
          "Domain name %s not in puny-coded, lower-case form",
          domainName);
      getInstance().domainName = domainName;
      return thisCastToDerived();
    }

    public B setDsData(ImmutableSet<DomainDsData> dsData) {
      getInstance().dsData = dsData;
      getInstance().resetUpdateTimestamp();
      return thisCastToDerived();
    }

    public B setRegistrant(Optional<VKey<Contact>> registrant) {
      // Set the registrant field specifically.
      getInstance().registrantContact = registrant.orElse(null);
      return thisCastToDerived();
    }

    public B setAuthInfo(DomainAuthInfo authInfo) {
      getInstance().authInfo = authInfo;
      return thisCastToDerived();
    }

    public B setNameservers(VKey<Host> nameserver) {
      getInstance().nsHosts = ImmutableSet.of(nameserver);
      getInstance().resetUpdateTimestamp();
      return thisCastToDerived();
    }

    public B setNameservers(ImmutableSet<VKey<Host>> nameservers) {
      getInstance().nsHosts = forceEmptyToNull(nameservers);
      getInstance().resetUpdateTimestamp();
      return thisCastToDerived();
    }

    public B addNameserver(VKey<Host> nameserver) {
      return addNameservers(ImmutableSet.of(nameserver));
    }

    public B addNameservers(ImmutableSet<VKey<Host>> nameservers) {
      return setNameservers(
          ImmutableSet.copyOf(Sets.union(getInstance().getNameservers(), nameservers)));
    }

    public B removeNameserver(VKey<Host> nameserver) {
      return removeNameservers(ImmutableSet.of(nameserver));
    }

    public B removeNameservers(ImmutableSet<VKey<Host>> nameservers) {
      return setNameservers(
          ImmutableSet.copyOf(difference(getInstance().getNameservers(), nameservers)));
    }

    public B setContacts(DesignatedContact contact) {
      return setContacts(ImmutableSet.of(contact));
    }

    public B setContacts(ImmutableSet<DesignatedContact> contacts) {
      checkArgument(contacts.stream().noneMatch(IS_REGISTRANT), "Registrant cannot be a contact");
      // Set the individual fields.
      getInstance().setContactFields(contacts, false);
      return thisCastToDerived();
    }

    public B addContacts(ImmutableSet<DesignatedContact> contacts) {
      return setContacts(ImmutableSet.copyOf(Sets.union(getInstance().getContacts(), contacts)));
    }

    public B removeContacts(ImmutableSet<DesignatedContact> contacts) {
      return setContacts(ImmutableSet.copyOf(difference(getInstance().getContacts(), contacts)));
    }

    public B setLaunchNotice(LaunchNotice launchNotice) {
      getInstance().launchNotice = launchNotice;
      return thisCastToDerived();
    }

    public B setIdnTableName(String idnTableName) {
      getInstance().idnTableName = idnTableName;
      return thisCastToDerived();
    }

    public B setSubordinateHosts(ImmutableSet<String> subordinateHosts) {
      getInstance().subordinateHosts = subordinateHosts;
      return thisCastToDerived();
    }

    public B addSubordinateHost(String hostToAdd) {
      return setSubordinateHosts(
          ImmutableSet.copyOf(union(getInstance().getSubordinateHosts(), hostToAdd)));
    }

    public B removeSubordinateHost(String hostToRemove) {
      return setSubordinateHosts(
          ImmutableSet.copyOf(
              CollectionUtils.difference(getInstance().getSubordinateHosts(), hostToRemove)));
    }

    public B setRegistrationExpirationTime(DateTime registrationExpirationTime) {
      getInstance().registrationExpirationTime = registrationExpirationTime;
      return thisCastToDerived();
    }

    public B setDeletePollMessage(VKey<OneTime> deletePollMessage) {
      getInstance().deletePollMessage = deletePollMessage;
      return thisCastToDerived();
    }

    public B setAutorenewBillingEvent(VKey<BillingRecurrence> autorenewBillingEvent) {
      getInstance().autorenewBillingEvent = autorenewBillingEvent;
      return thisCastToDerived();
    }

    public B setAutorenewPollMessage(@Nullable VKey<Autorenew> autorenewPollMessage) {
      getInstance().autorenewPollMessage = autorenewPollMessage;
      return thisCastToDerived();
    }

    public B setLordnPhase(LordnPhase lordnPhase) {
      getInstance().lordnPhase = lordnPhase;
      return thisCastToDerived();
    }

    public B setSmdId(String smdId) {
      getInstance().smdId = smdId;
      return thisCastToDerived();
    }

    public B setGracePeriods(ImmutableSet<GracePeriod> gracePeriods) {
      getInstance().gracePeriods = gracePeriods;
      getInstance().resetUpdateTimestamp();
      return thisCastToDerived();
    }

    public B addGracePeriod(GracePeriod gracePeriod) {
      getInstance().gracePeriods = union(getInstance().getGracePeriods(), gracePeriod);
      getInstance().resetUpdateTimestamp();
      return thisCastToDerived();
    }

    public B removeGracePeriod(GracePeriod gracePeriod) {
      getInstance().gracePeriods =
          CollectionUtils.difference(getInstance().getGracePeriods(), gracePeriod);
      getInstance().resetUpdateTimestamp();
      return thisCastToDerived();
    }

    /**
     * Sets the autorenew end time, or clears it if empty is passed.
     *
     * <p>Note that {@link DateTimeUtils#END_OF_TIME} is used as a sentinel value in the database
     * representation to signify that autorenew doesn't end, and is mapped to empty here for the
     * purposes of more legible business logic.
     */
    public B setAutorenewEndTime(Optional<DateTime> autorenewEndTime) {
      getInstance().autorenewEndTime = autorenewEndTime.orElse(END_OF_TIME);
      return thisCastToDerived();
    }

    @Override
    public B setTransferData(DomainTransferData transferData) {
      getInstance().transferData = transferData;
      return thisCastToDerived();
    }

    @Override
    public B setLastTransferTime(DateTime lastTransferTime) {
      getInstance().lastTransferTime = lastTransferTime;
      return thisCastToDerived();
    }

    public B setCurrentBulkToken(@Nullable VKey<AllocationToken> currentBulkToken) {
      if (currentBulkToken == null) {
        getInstance().currentBulkToken = currentBulkToken;
        return thisCastToDerived();
      }
      // Non-null tokens are validated against the database: the token must exist and must be a
      // BULK_PRICING token.
      AllocationToken token =
          tm().transact(() -> tm().loadByKeyIfPresent(currentBulkToken))
              .orElseThrow(
                  () ->
                      new IllegalArgumentException(
                          String.format(
                              "The bulk token %s does not exist", currentBulkToken.getKey())));
      checkArgument(
          token.getTokenType().equals(TokenType.BULK_PRICING),
          "The currentBulkToken must have a BULK_PRICING TokenType");
      getInstance().currentBulkToken = currentBulkToken;
      return thisCastToDerived();
    }
  }
}
googleads/google-ads-java
38,016
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/PlannableUserList.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/reach_plan_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * A plannable user list. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.PlannableUserList} */ public final class PlannableUserList extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.PlannableUserList) PlannableUserListOrBuilder { private static final long serialVersionUID = 0L; // Use PlannableUserList.newBuilder() to construct. private PlannableUserList(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PlannableUserList() { displayName_ = ""; userListType_ = 0; plannableStatus_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PlannableUserList(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v20_services_PlannableUserList_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v20_services_PlannableUserList_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.PlannableUserList.class, com.google.ads.googleads.v20.services.PlannableUserList.Builder.class); } private int bitField0_; public static final int USER_LIST_INFO_FIELD_NUMBER = 1; private com.google.ads.googleads.v20.common.UserListInfo userListInfo_; /** * <pre> * The user list ID. 
* </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> * @return Whether the userListInfo field is set. */ @java.lang.Override public boolean hasUserListInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> * @return The userListInfo. */ @java.lang.Override public com.google.ads.googleads.v20.common.UserListInfo getUserListInfo() { return userListInfo_ == null ? com.google.ads.googleads.v20.common.UserListInfo.getDefaultInstance() : userListInfo_; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.UserListInfoOrBuilder getUserListInfoOrBuilder() { return userListInfo_ == null ? com.google.ads.googleads.v20.common.UserListInfo.getDefaultInstance() : userListInfo_; } public static final int DISPLAY_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object displayName_ = ""; /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The displayName. */ @java.lang.Override public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The bytes for displayName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int USER_LIST_TYPE_FIELD_NUMBER = 3; private int userListType_ = 0; /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The enum numeric value on the wire for userListType. */ @java.lang.Override public int getUserListTypeValue() { return userListType_; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The userListType. */ @java.lang.Override public com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType getUserListType() { com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType result = com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType.forNumber(userListType_); return result == null ? com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType.UNRECOGNIZED : result; } public static final int PLANNABLE_STATUS_FIELD_NUMBER = 4; private int plannableStatus_ = 0; /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The enum numeric value on the wire for plannableStatus. */ @java.lang.Override public int getPlannableStatusValue() { return plannableStatus_; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The plannableStatus. 
*/ @java.lang.Override public com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus getPlannableStatus() { com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus result = com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.forNumber(plannableStatus_); return result == null ? com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUserListInfo()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_); } if (userListType_ != com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType.UNSPECIFIED.getNumber()) { output.writeEnum(3, userListType_); } if (plannableStatus_ != com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNSPECIFIED.getNumber()) { output.writeEnum(4, plannableStatus_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getUserListInfo()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_); } if (userListType_ != 
com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, userListType_); } if (plannableStatus_ != com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, plannableStatus_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.PlannableUserList)) { return super.equals(obj); } com.google.ads.googleads.v20.services.PlannableUserList other = (com.google.ads.googleads.v20.services.PlannableUserList) obj; if (hasUserListInfo() != other.hasUserListInfo()) return false; if (hasUserListInfo()) { if (!getUserListInfo() .equals(other.getUserListInfo())) return false; } if (!getDisplayName() .equals(other.getDisplayName())) return false; if (userListType_ != other.userListType_) return false; if (plannableStatus_ != other.plannableStatus_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUserListInfo()) { hash = (37 * hash) + USER_LIST_INFO_FIELD_NUMBER; hash = (53 * hash) + getUserListInfo().hashCode(); } hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getDisplayName().hashCode(); hash = (37 * hash) + USER_LIST_TYPE_FIELD_NUMBER; hash = (53 * hash) + userListType_; hash = (37 * hash) + PLANNABLE_STATUS_FIELD_NUMBER; hash = (53 * hash) + plannableStatus_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
com.google.ads.googleads.v20.services.PlannableUserList parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v20.services.PlannableUserList parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.PlannableUserList parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.PlannableUserList parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.PlannableUserList prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A plannable user list. 
* </pre> * * Protobuf type {@code google.ads.googleads.v20.services.PlannableUserList} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.PlannableUserList) com.google.ads.googleads.v20.services.PlannableUserListOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v20_services_PlannableUserList_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v20_services_PlannableUserList_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.PlannableUserList.class, com.google.ads.googleads.v20.services.PlannableUserList.Builder.class); } // Construct using com.google.ads.googleads.v20.services.PlannableUserList.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUserListInfoFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; userListInfo_ = null; if (userListInfoBuilder_ != null) { userListInfoBuilder_.dispose(); userListInfoBuilder_ = null; } displayName_ = ""; userListType_ = 0; plannableStatus_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v20_services_PlannableUserList_descriptor; } 
@java.lang.Override public com.google.ads.googleads.v20.services.PlannableUserList getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.PlannableUserList.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.PlannableUserList build() { com.google.ads.googleads.v20.services.PlannableUserList result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.PlannableUserList buildPartial() { com.google.ads.googleads.v20.services.PlannableUserList result = new com.google.ads.googleads.v20.services.PlannableUserList(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.services.PlannableUserList result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.userListInfo_ = userListInfoBuilder_ == null ? 
userListInfo_ : userListInfoBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.displayName_ = displayName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.userListType_ = userListType_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.plannableStatus_ = plannableStatus_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.PlannableUserList) { return mergeFrom((com.google.ads.googleads.v20.services.PlannableUserList)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.PlannableUserList other) { if (other == com.google.ads.googleads.v20.services.PlannableUserList.getDefaultInstance()) return this; if (other.hasUserListInfo()) { mergeUserListInfo(other.getUserListInfo()); } if (!other.getDisplayName().isEmpty()) { displayName_ = other.displayName_; bitField0_ |= 0x00000002; onChanged(); } if (other.userListType_ != 0) { 
setUserListTypeValue(other.getUserListTypeValue()); } if (other.plannableStatus_ != 0) { setPlannableStatusValue(other.getPlannableStatusValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUserListInfoFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { displayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { userListType_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { plannableStatus_ = input.readEnum(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.ads.googleads.v20.common.UserListInfo userListInfo_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.UserListInfo, com.google.ads.googleads.v20.common.UserListInfo.Builder, com.google.ads.googleads.v20.common.UserListInfoOrBuilder> userListInfoBuilder_; /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> * @return Whether the userListInfo field is set. 
*/ public boolean hasUserListInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> * @return The userListInfo. */ public com.google.ads.googleads.v20.common.UserListInfo getUserListInfo() { if (userListInfoBuilder_ == null) { return userListInfo_ == null ? com.google.ads.googleads.v20.common.UserListInfo.getDefaultInstance() : userListInfo_; } else { return userListInfoBuilder_.getMessage(); } } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ public Builder setUserListInfo(com.google.ads.googleads.v20.common.UserListInfo value) { if (userListInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userListInfo_ = value; } else { userListInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ public Builder setUserListInfo( com.google.ads.googleads.v20.common.UserListInfo.Builder builderForValue) { if (userListInfoBuilder_ == null) { userListInfo_ = builderForValue.build(); } else { userListInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The user list ID. 
* </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ public Builder mergeUserListInfo(com.google.ads.googleads.v20.common.UserListInfo value) { if (userListInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && userListInfo_ != null && userListInfo_ != com.google.ads.googleads.v20.common.UserListInfo.getDefaultInstance()) { getUserListInfoBuilder().mergeFrom(value); } else { userListInfo_ = value; } } else { userListInfoBuilder_.mergeFrom(value); } if (userListInfo_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ public Builder clearUserListInfo() { bitField0_ = (bitField0_ & ~0x00000001); userListInfo_ = null; if (userListInfoBuilder_ != null) { userListInfoBuilder_.dispose(); userListInfoBuilder_ = null; } onChanged(); return this; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ public com.google.ads.googleads.v20.common.UserListInfo.Builder getUserListInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUserListInfoFieldBuilder().getBuilder(); } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ public com.google.ads.googleads.v20.common.UserListInfoOrBuilder getUserListInfoOrBuilder() { if (userListInfoBuilder_ != null) { return userListInfoBuilder_.getMessageOrBuilder(); } else { return userListInfo_ == null ? com.google.ads.googleads.v20.common.UserListInfo.getDefaultInstance() : userListInfo_; } } /** * <pre> * The user list ID. 
* </pre> * * <code>.google.ads.googleads.v20.common.UserListInfo user_list_info = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.UserListInfo, com.google.ads.googleads.v20.common.UserListInfo.Builder, com.google.ads.googleads.v20.common.UserListInfoOrBuilder> getUserListInfoFieldBuilder() { if (userListInfoBuilder_ == null) { userListInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.UserListInfo, com.google.ads.googleads.v20.common.UserListInfo.Builder, com.google.ads.googleads.v20.common.UserListInfoOrBuilder>( getUserListInfo(), getParentForChildren(), isClean()); userListInfo_ = null; } return userListInfoBuilder_; } private java.lang.Object displayName_ = ""; /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The displayName. */ public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The bytes for displayName. */ public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @param value The displayName to set. * @return This builder for chaining. 
*/ public Builder setDisplayName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } displayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return This builder for chaining. */ public Builder clearDisplayName() { displayName_ = getDefaultInstance().getDisplayName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @param value The bytes for displayName to set. * @return This builder for chaining. */ public Builder setDisplayNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); displayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int userListType_ = 0; /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The enum numeric value on the wire for userListType. */ @java.lang.Override public int getUserListTypeValue() { return userListType_; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @param value The enum numeric value on the wire for userListType to set. * @return This builder for chaining. */ public Builder setUserListTypeValue(int value) { userListType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The userListType. 
*/ @java.lang.Override public com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType getUserListType() { com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType result = com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType.forNumber(userListType_); return result == null ? com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType.UNRECOGNIZED : result; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @param value The userListType to set. * @return This builder for chaining. */ public Builder setUserListType(com.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; userListType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v20.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return This builder for chaining. */ public Builder clearUserListType() { bitField0_ = (bitField0_ & ~0x00000004); userListType_ = 0; onChanged(); return this; } private int plannableStatus_ = 0; /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The enum numeric value on the wire for plannableStatus. */ @java.lang.Override public int getPlannableStatusValue() { return plannableStatus_; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @param value The enum numeric value on the wire for plannableStatus to set. * @return This builder for chaining. 
*/ public Builder setPlannableStatusValue(int value) { plannableStatus_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The plannableStatus. */ @java.lang.Override public com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus getPlannableStatus() { com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus result = com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.forNumber(plannableStatus_); return result == null ? com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNRECOGNIZED : result; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @param value The plannableStatus to set. * @return This builder for chaining. */ public Builder setPlannableStatus(com.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; plannableStatus_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v20.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return This builder for chaining. 
*/ public Builder clearPlannableStatus() { bitField0_ = (bitField0_ & ~0x00000008); plannableStatus_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.PlannableUserList) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.PlannableUserList) private static final com.google.ads.googleads.v20.services.PlannableUserList DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.PlannableUserList(); } public static com.google.ads.googleads.v20.services.PlannableUserList getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PlannableUserList> PARSER = new com.google.protobuf.AbstractParser<PlannableUserList>() { @java.lang.Override public PlannableUserList parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PlannableUserList> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<PlannableUserList> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.services.PlannableUserList getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
38,016
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/PlannableUserList.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/services/reach_plan_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.services; /** * <pre> * A plannable user list. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.PlannableUserList} */ public final class PlannableUserList extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.PlannableUserList) PlannableUserListOrBuilder { private static final long serialVersionUID = 0L; // Use PlannableUserList.newBuilder() to construct. private PlannableUserList(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PlannableUserList() { displayName_ = ""; userListType_ = 0; plannableStatus_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PlannableUserList(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_PlannableUserList_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_PlannableUserList_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.PlannableUserList.class, com.google.ads.googleads.v21.services.PlannableUserList.Builder.class); } private int bitField0_; public static final int USER_LIST_INFO_FIELD_NUMBER = 1; private com.google.ads.googleads.v21.common.UserListInfo userListInfo_; /** * <pre> * The user list ID. 
* </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> * @return Whether the userListInfo field is set. */ @java.lang.Override public boolean hasUserListInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> * @return The userListInfo. */ @java.lang.Override public com.google.ads.googleads.v21.common.UserListInfo getUserListInfo() { return userListInfo_ == null ? com.google.ads.googleads.v21.common.UserListInfo.getDefaultInstance() : userListInfo_; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.UserListInfoOrBuilder getUserListInfoOrBuilder() { return userListInfo_ == null ? com.google.ads.googleads.v21.common.UserListInfo.getDefaultInstance() : userListInfo_; } public static final int DISPLAY_NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object displayName_ = ""; /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The displayName. */ @java.lang.Override public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The bytes for displayName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int USER_LIST_TYPE_FIELD_NUMBER = 3; private int userListType_ = 0; /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The enum numeric value on the wire for userListType. */ @java.lang.Override public int getUserListTypeValue() { return userListType_; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The userListType. */ @java.lang.Override public com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType getUserListType() { com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType result = com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType.forNumber(userListType_); return result == null ? com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType.UNRECOGNIZED : result; } public static final int PLANNABLE_STATUS_FIELD_NUMBER = 4; private int plannableStatus_ = 0; /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The enum numeric value on the wire for plannableStatus. */ @java.lang.Override public int getPlannableStatusValue() { return plannableStatus_; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The plannableStatus. 
*/ @java.lang.Override public com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus getPlannableStatus() { com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus result = com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.forNumber(plannableStatus_); return result == null ? com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUserListInfo()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_); } if (userListType_ != com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType.UNSPECIFIED.getNumber()) { output.writeEnum(3, userListType_); } if (plannableStatus_ != com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNSPECIFIED.getNumber()) { output.writeEnum(4, plannableStatus_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getUserListInfo()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_); } if (userListType_ != 
com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, userListType_); } if (plannableStatus_ != com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, plannableStatus_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.services.PlannableUserList)) { return super.equals(obj); } com.google.ads.googleads.v21.services.PlannableUserList other = (com.google.ads.googleads.v21.services.PlannableUserList) obj; if (hasUserListInfo() != other.hasUserListInfo()) return false; if (hasUserListInfo()) { if (!getUserListInfo() .equals(other.getUserListInfo())) return false; } if (!getDisplayName() .equals(other.getDisplayName())) return false; if (userListType_ != other.userListType_) return false; if (plannableStatus_ != other.plannableStatus_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUserListInfo()) { hash = (37 * hash) + USER_LIST_INFO_FIELD_NUMBER; hash = (53 * hash) + getUserListInfo().hashCode(); } hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getDisplayName().hashCode(); hash = (37 * hash) + USER_LIST_TYPE_FIELD_NUMBER; hash = (53 * hash) + userListType_; hash = (37 * hash) + PLANNABLE_STATUS_FIELD_NUMBER; hash = (53 * hash) + plannableStatus_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static 
com.google.ads.googleads.v21.services.PlannableUserList parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v21.services.PlannableUserList parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.PlannableUserList parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.PlannableUserList parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.services.PlannableUserList prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A plannable user list. 
* </pre> * * Protobuf type {@code google.ads.googleads.v21.services.PlannableUserList} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.PlannableUserList) com.google.ads.googleads.v21.services.PlannableUserListOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_PlannableUserList_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_PlannableUserList_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.PlannableUserList.class, com.google.ads.googleads.v21.services.PlannableUserList.Builder.class); } // Construct using com.google.ads.googleads.v21.services.PlannableUserList.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUserListInfoFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; userListInfo_ = null; if (userListInfoBuilder_ != null) { userListInfoBuilder_.dispose(); userListInfoBuilder_ = null; } displayName_ = ""; userListType_ = 0; plannableStatus_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_PlannableUserList_descriptor; } 
@java.lang.Override public com.google.ads.googleads.v21.services.PlannableUserList getDefaultInstanceForType() { return com.google.ads.googleads.v21.services.PlannableUserList.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.services.PlannableUserList build() { com.google.ads.googleads.v21.services.PlannableUserList result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.services.PlannableUserList buildPartial() { com.google.ads.googleads.v21.services.PlannableUserList result = new com.google.ads.googleads.v21.services.PlannableUserList(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.services.PlannableUserList result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.userListInfo_ = userListInfoBuilder_ == null ? 
userListInfo_ : userListInfoBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.displayName_ = displayName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.userListType_ = userListType_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.plannableStatus_ = plannableStatus_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.services.PlannableUserList) { return mergeFrom((com.google.ads.googleads.v21.services.PlannableUserList)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.services.PlannableUserList other) { if (other == com.google.ads.googleads.v21.services.PlannableUserList.getDefaultInstance()) return this; if (other.hasUserListInfo()) { mergeUserListInfo(other.getUserListInfo()); } if (!other.getDisplayName().isEmpty()) { displayName_ = other.displayName_; bitField0_ |= 0x00000002; onChanged(); } if (other.userListType_ != 0) { 
setUserListTypeValue(other.getUserListTypeValue()); } if (other.plannableStatus_ != 0) { setPlannableStatusValue(other.getPlannableStatusValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUserListInfoFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { displayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { userListType_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { plannableStatus_ = input.readEnum(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.ads.googleads.v21.common.UserListInfo userListInfo_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.UserListInfo, com.google.ads.googleads.v21.common.UserListInfo.Builder, com.google.ads.googleads.v21.common.UserListInfoOrBuilder> userListInfoBuilder_; /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> * @return Whether the userListInfo field is set. 
*/ public boolean hasUserListInfo() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> * @return The userListInfo. */ public com.google.ads.googleads.v21.common.UserListInfo getUserListInfo() { if (userListInfoBuilder_ == null) { return userListInfo_ == null ? com.google.ads.googleads.v21.common.UserListInfo.getDefaultInstance() : userListInfo_; } else { return userListInfoBuilder_.getMessage(); } } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ public Builder setUserListInfo(com.google.ads.googleads.v21.common.UserListInfo value) { if (userListInfoBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userListInfo_ = value; } else { userListInfoBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ public Builder setUserListInfo( com.google.ads.googleads.v21.common.UserListInfo.Builder builderForValue) { if (userListInfoBuilder_ == null) { userListInfo_ = builderForValue.build(); } else { userListInfoBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The user list ID. 
* </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ public Builder mergeUserListInfo(com.google.ads.googleads.v21.common.UserListInfo value) { if (userListInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && userListInfo_ != null && userListInfo_ != com.google.ads.googleads.v21.common.UserListInfo.getDefaultInstance()) { getUserListInfoBuilder().mergeFrom(value); } else { userListInfo_ = value; } } else { userListInfoBuilder_.mergeFrom(value); } if (userListInfo_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ public Builder clearUserListInfo() { bitField0_ = (bitField0_ & ~0x00000001); userListInfo_ = null; if (userListInfoBuilder_ != null) { userListInfoBuilder_.dispose(); userListInfoBuilder_ = null; } onChanged(); return this; } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ public com.google.ads.googleads.v21.common.UserListInfo.Builder getUserListInfoBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUserListInfoFieldBuilder().getBuilder(); } /** * <pre> * The user list ID. * </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ public com.google.ads.googleads.v21.common.UserListInfoOrBuilder getUserListInfoOrBuilder() { if (userListInfoBuilder_ != null) { return userListInfoBuilder_.getMessageOrBuilder(); } else { return userListInfo_ == null ? com.google.ads.googleads.v21.common.UserListInfo.getDefaultInstance() : userListInfo_; } } /** * <pre> * The user list ID. 
* </pre> * * <code>.google.ads.googleads.v21.common.UserListInfo user_list_info = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.UserListInfo, com.google.ads.googleads.v21.common.UserListInfo.Builder, com.google.ads.googleads.v21.common.UserListInfoOrBuilder> getUserListInfoFieldBuilder() { if (userListInfoBuilder_ == null) { userListInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.UserListInfo, com.google.ads.googleads.v21.common.UserListInfo.Builder, com.google.ads.googleads.v21.common.UserListInfoOrBuilder>( getUserListInfo(), getParentForChildren(), isClean()); userListInfo_ = null; } return userListInfoBuilder_; } private java.lang.Object displayName_ = ""; /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The displayName. */ public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return The bytes for displayName. */ public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @param value The displayName to set. * @return This builder for chaining. 
*/ public Builder setDisplayName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } displayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @return This builder for chaining. */ public Builder clearDisplayName() { displayName_ = getDefaultInstance().getDisplayName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * The name of the user list. * </pre> * * <code>string display_name = 2;</code> * @param value The bytes for displayName to set. * @return This builder for chaining. */ public Builder setDisplayNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); displayName_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int userListType_ = 0; /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The enum numeric value on the wire for userListType. */ @java.lang.Override public int getUserListTypeValue() { return userListType_; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @param value The enum numeric value on the wire for userListType to set. * @return This builder for chaining. */ public Builder setUserListTypeValue(int value) { userListType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return The userListType. 
*/ @java.lang.Override public com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType getUserListType() { com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType result = com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType.forNumber(userListType_); return result == null ? com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType.UNRECOGNIZED : result; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @param value The userListType to set. * @return This builder for chaining. */ public Builder setUserListType(com.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; userListType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The user list type. * </pre> * * <code>.google.ads.googleads.v21.enums.UserListTypeEnum.UserListType user_list_type = 3;</code> * @return This builder for chaining. */ public Builder clearUserListType() { bitField0_ = (bitField0_ & ~0x00000004); userListType_ = 0; onChanged(); return this; } private int plannableStatus_ = 0; /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The enum numeric value on the wire for plannableStatus. */ @java.lang.Override public int getPlannableStatusValue() { return plannableStatus_; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @param value The enum numeric value on the wire for plannableStatus to set. * @return This builder for chaining. 
*/ public Builder setPlannableStatusValue(int value) { plannableStatus_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return The plannableStatus. */ @java.lang.Override public com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus getPlannableStatus() { com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus result = com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.forNumber(plannableStatus_); return result == null ? com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus.UNRECOGNIZED : result; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @param value The plannableStatus to set. * @return This builder for chaining. */ public Builder setPlannableStatus(com.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; plannableStatus_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The plannable status of the user list. * </pre> * * <code>.google.ads.googleads.v21.enums.ReachPlanPlannableUserListStatusEnum.ReachPlanPlannableUserListStatus plannable_status = 4;</code> * @return This builder for chaining. 
*/ public Builder clearPlannableStatus() { bitField0_ = (bitField0_ & ~0x00000008); plannableStatus_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.PlannableUserList) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.PlannableUserList) private static final com.google.ads.googleads.v21.services.PlannableUserList DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.PlannableUserList(); } public static com.google.ads.googleads.v21.services.PlannableUserList getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PlannableUserList> PARSER = new com.google.protobuf.AbstractParser<PlannableUserList>() { @java.lang.Override public PlannableUserList parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PlannableUserList> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<PlannableUserList> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.services.PlannableUserList getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/tapestry-5
37,780
plastic/src/external/java/org/apache/tapestry5/internal/plastic/asm/commons/InstructionAdapter.java
// ASM: a very small and fast Java bytecode manipulation framework // Copyright (c) 2000-2011 INRIA, France Telecom // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. Neither the name of the copyright holders nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
package org.apache.tapestry5.internal.plastic.asm.commons; import org.apache.tapestry5.internal.plastic.asm.ConstantDynamic; import org.apache.tapestry5.internal.plastic.asm.Handle; import org.apache.tapestry5.internal.plastic.asm.Label; import org.apache.tapestry5.internal.plastic.asm.MethodVisitor; import org.apache.tapestry5.internal.plastic.asm.Opcodes; import org.apache.tapestry5.internal.plastic.asm.Type; /** * A {@link MethodVisitor} providing a more detailed API to generate and transform instructions. * * @author Eric Bruneton */ public class InstructionAdapter extends MethodVisitor { /** The type of the java.lang.Object class. */ public static final Type OBJECT_TYPE = Type.getType("Ljava/lang/Object;"); /** * Constructs a new {@link InstructionAdapter}. <i>Subclasses must not use this constructor</i>. * Instead, they must use the {@link #InstructionAdapter(int, MethodVisitor)} version. * * @param methodVisitor the method visitor to which this adapter delegates calls. * @throws IllegalStateException If a subclass calls this constructor. */ public InstructionAdapter(final MethodVisitor methodVisitor) { this(/* latest api = */ Opcodes.ASM9, methodVisitor); if (getClass() != InstructionAdapter.class) { throw new IllegalStateException(); } } /** * Constructs a new {@link InstructionAdapter}. * * @param api the ASM API version implemented by this visitor. Must be one of the {@code * ASM}<i>x</i> values in {@link Opcodes}. * @param methodVisitor the method visitor to which this adapter delegates calls. 
*/ protected InstructionAdapter(final int api, final MethodVisitor methodVisitor) { super(api, methodVisitor); } @Override public void visitInsn(final int opcode) { switch (opcode) { case Opcodes.NOP: nop(); break; case Opcodes.ACONST_NULL: aconst(null); break; case Opcodes.ICONST_M1: case Opcodes.ICONST_0: case Opcodes.ICONST_1: case Opcodes.ICONST_2: case Opcodes.ICONST_3: case Opcodes.ICONST_4: case Opcodes.ICONST_5: iconst(opcode - Opcodes.ICONST_0); break; case Opcodes.LCONST_0: case Opcodes.LCONST_1: lconst((long) (opcode - Opcodes.LCONST_0)); break; case Opcodes.FCONST_0: case Opcodes.FCONST_1: case Opcodes.FCONST_2: fconst((float) (opcode - Opcodes.FCONST_0)); break; case Opcodes.DCONST_0: case Opcodes.DCONST_1: dconst((double) (opcode - Opcodes.DCONST_0)); break; case Opcodes.IALOAD: aload(Type.INT_TYPE); break; case Opcodes.LALOAD: aload(Type.LONG_TYPE); break; case Opcodes.FALOAD: aload(Type.FLOAT_TYPE); break; case Opcodes.DALOAD: aload(Type.DOUBLE_TYPE); break; case Opcodes.AALOAD: aload(OBJECT_TYPE); break; case Opcodes.BALOAD: aload(Type.BYTE_TYPE); break; case Opcodes.CALOAD: aload(Type.CHAR_TYPE); break; case Opcodes.SALOAD: aload(Type.SHORT_TYPE); break; case Opcodes.IASTORE: astore(Type.INT_TYPE); break; case Opcodes.LASTORE: astore(Type.LONG_TYPE); break; case Opcodes.FASTORE: astore(Type.FLOAT_TYPE); break; case Opcodes.DASTORE: astore(Type.DOUBLE_TYPE); break; case Opcodes.AASTORE: astore(OBJECT_TYPE); break; case Opcodes.BASTORE: astore(Type.BYTE_TYPE); break; case Opcodes.CASTORE: astore(Type.CHAR_TYPE); break; case Opcodes.SASTORE: astore(Type.SHORT_TYPE); break; case Opcodes.POP: pop(); break; case Opcodes.POP2: pop2(); break; case Opcodes.DUP: dup(); break; case Opcodes.DUP_X1: dupX1(); break; case Opcodes.DUP_X2: dupX2(); break; case Opcodes.DUP2: dup2(); break; case Opcodes.DUP2_X1: dup2X1(); break; case Opcodes.DUP2_X2: dup2X2(); break; case Opcodes.SWAP: swap(); break; case Opcodes.IADD: add(Type.INT_TYPE); break; case Opcodes.LADD: 
add(Type.LONG_TYPE); break; case Opcodes.FADD: add(Type.FLOAT_TYPE); break; case Opcodes.DADD: add(Type.DOUBLE_TYPE); break; case Opcodes.ISUB: sub(Type.INT_TYPE); break; case Opcodes.LSUB: sub(Type.LONG_TYPE); break; case Opcodes.FSUB: sub(Type.FLOAT_TYPE); break; case Opcodes.DSUB: sub(Type.DOUBLE_TYPE); break; case Opcodes.IMUL: mul(Type.INT_TYPE); break; case Opcodes.LMUL: mul(Type.LONG_TYPE); break; case Opcodes.FMUL: mul(Type.FLOAT_TYPE); break; case Opcodes.DMUL: mul(Type.DOUBLE_TYPE); break; case Opcodes.IDIV: div(Type.INT_TYPE); break; case Opcodes.LDIV: div(Type.LONG_TYPE); break; case Opcodes.FDIV: div(Type.FLOAT_TYPE); break; case Opcodes.DDIV: div(Type.DOUBLE_TYPE); break; case Opcodes.IREM: rem(Type.INT_TYPE); break; case Opcodes.LREM: rem(Type.LONG_TYPE); break; case Opcodes.FREM: rem(Type.FLOAT_TYPE); break; case Opcodes.DREM: rem(Type.DOUBLE_TYPE); break; case Opcodes.INEG: neg(Type.INT_TYPE); break; case Opcodes.LNEG: neg(Type.LONG_TYPE); break; case Opcodes.FNEG: neg(Type.FLOAT_TYPE); break; case Opcodes.DNEG: neg(Type.DOUBLE_TYPE); break; case Opcodes.ISHL: shl(Type.INT_TYPE); break; case Opcodes.LSHL: shl(Type.LONG_TYPE); break; case Opcodes.ISHR: shr(Type.INT_TYPE); break; case Opcodes.LSHR: shr(Type.LONG_TYPE); break; case Opcodes.IUSHR: ushr(Type.INT_TYPE); break; case Opcodes.LUSHR: ushr(Type.LONG_TYPE); break; case Opcodes.IAND: and(Type.INT_TYPE); break; case Opcodes.LAND: and(Type.LONG_TYPE); break; case Opcodes.IOR: or(Type.INT_TYPE); break; case Opcodes.LOR: or(Type.LONG_TYPE); break; case Opcodes.IXOR: xor(Type.INT_TYPE); break; case Opcodes.LXOR: xor(Type.LONG_TYPE); break; case Opcodes.I2L: cast(Type.INT_TYPE, Type.LONG_TYPE); break; case Opcodes.I2F: cast(Type.INT_TYPE, Type.FLOAT_TYPE); break; case Opcodes.I2D: cast(Type.INT_TYPE, Type.DOUBLE_TYPE); break; case Opcodes.L2I: cast(Type.LONG_TYPE, Type.INT_TYPE); break; case Opcodes.L2F: cast(Type.LONG_TYPE, Type.FLOAT_TYPE); break; case Opcodes.L2D: cast(Type.LONG_TYPE, 
Type.DOUBLE_TYPE); break; case Opcodes.F2I: cast(Type.FLOAT_TYPE, Type.INT_TYPE); break; case Opcodes.F2L: cast(Type.FLOAT_TYPE, Type.LONG_TYPE); break; case Opcodes.F2D: cast(Type.FLOAT_TYPE, Type.DOUBLE_TYPE); break; case Opcodes.D2I: cast(Type.DOUBLE_TYPE, Type.INT_TYPE); break; case Opcodes.D2L: cast(Type.DOUBLE_TYPE, Type.LONG_TYPE); break; case Opcodes.D2F: cast(Type.DOUBLE_TYPE, Type.FLOAT_TYPE); break; case Opcodes.I2B: cast(Type.INT_TYPE, Type.BYTE_TYPE); break; case Opcodes.I2C: cast(Type.INT_TYPE, Type.CHAR_TYPE); break; case Opcodes.I2S: cast(Type.INT_TYPE, Type.SHORT_TYPE); break; case Opcodes.LCMP: lcmp(); break; case Opcodes.FCMPL: cmpl(Type.FLOAT_TYPE); break; case Opcodes.FCMPG: cmpg(Type.FLOAT_TYPE); break; case Opcodes.DCMPL: cmpl(Type.DOUBLE_TYPE); break; case Opcodes.DCMPG: cmpg(Type.DOUBLE_TYPE); break; case Opcodes.IRETURN: areturn(Type.INT_TYPE); break; case Opcodes.LRETURN: areturn(Type.LONG_TYPE); break; case Opcodes.FRETURN: areturn(Type.FLOAT_TYPE); break; case Opcodes.DRETURN: areturn(Type.DOUBLE_TYPE); break; case Opcodes.ARETURN: areturn(OBJECT_TYPE); break; case Opcodes.RETURN: areturn(Type.VOID_TYPE); break; case Opcodes.ARRAYLENGTH: arraylength(); break; case Opcodes.ATHROW: athrow(); break; case Opcodes.MONITORENTER: monitorenter(); break; case Opcodes.MONITOREXIT: monitorexit(); break; default: throw new IllegalArgumentException(); } } @Override public void visitIntInsn(final int opcode, final int operand) { switch (opcode) { case Opcodes.BIPUSH: iconst(operand); break; case Opcodes.SIPUSH: iconst(operand); break; case Opcodes.NEWARRAY: switch (operand) { case Opcodes.T_BOOLEAN: newarray(Type.BOOLEAN_TYPE); break; case Opcodes.T_CHAR: newarray(Type.CHAR_TYPE); break; case Opcodes.T_BYTE: newarray(Type.BYTE_TYPE); break; case Opcodes.T_SHORT: newarray(Type.SHORT_TYPE); break; case Opcodes.T_INT: newarray(Type.INT_TYPE); break; case Opcodes.T_FLOAT: newarray(Type.FLOAT_TYPE); break; case Opcodes.T_LONG: newarray(Type.LONG_TYPE); 
break; case Opcodes.T_DOUBLE: newarray(Type.DOUBLE_TYPE); break; default: throw new IllegalArgumentException(); } break; default: throw new IllegalArgumentException(); } } @Override public void visitVarInsn(final int opcode, final int varIndex) { switch (opcode) { case Opcodes.ILOAD: load(varIndex, Type.INT_TYPE); break; case Opcodes.LLOAD: load(varIndex, Type.LONG_TYPE); break; case Opcodes.FLOAD: load(varIndex, Type.FLOAT_TYPE); break; case Opcodes.DLOAD: load(varIndex, Type.DOUBLE_TYPE); break; case Opcodes.ALOAD: load(varIndex, OBJECT_TYPE); break; case Opcodes.ISTORE: store(varIndex, Type.INT_TYPE); break; case Opcodes.LSTORE: store(varIndex, Type.LONG_TYPE); break; case Opcodes.FSTORE: store(varIndex, Type.FLOAT_TYPE); break; case Opcodes.DSTORE: store(varIndex, Type.DOUBLE_TYPE); break; case Opcodes.ASTORE: store(varIndex, OBJECT_TYPE); break; case Opcodes.RET: ret(varIndex); break; default: throw new IllegalArgumentException(); } } @Override public void visitTypeInsn(final int opcode, final String type) { Type objectType = Type.getObjectType(type); switch (opcode) { case Opcodes.NEW: anew(objectType); break; case Opcodes.ANEWARRAY: newarray(objectType); break; case Opcodes.CHECKCAST: checkcast(objectType); break; case Opcodes.INSTANCEOF: instanceOf(objectType); break; default: throw new IllegalArgumentException(); } } @Override public void visitFieldInsn( final int opcode, final String owner, final String name, final String descriptor) { switch (opcode) { case Opcodes.GETSTATIC: getstatic(owner, name, descriptor); break; case Opcodes.PUTSTATIC: putstatic(owner, name, descriptor); break; case Opcodes.GETFIELD: getfield(owner, name, descriptor); break; case Opcodes.PUTFIELD: putfield(owner, name, descriptor); break; default: throw new IllegalArgumentException(); } } @Override public void visitMethodInsn( final int opcodeAndSource, final String owner, final String name, final String descriptor, final boolean isInterface) { if (api < Opcodes.ASM5 && 
(opcodeAndSource & Opcodes.SOURCE_DEPRECATED) == 0) { // Redirect the call to the deprecated version of this method. super.visitMethodInsn(opcodeAndSource, owner, name, descriptor, isInterface); return; } int opcode = opcodeAndSource & ~Opcodes.SOURCE_MASK; switch (opcode) { case Opcodes.INVOKESPECIAL: invokespecial(owner, name, descriptor, isInterface); break; case Opcodes.INVOKEVIRTUAL: invokevirtual(owner, name, descriptor, isInterface); break; case Opcodes.INVOKESTATIC: invokestatic(owner, name, descriptor, isInterface); break; case Opcodes.INVOKEINTERFACE: invokeinterface(owner, name, descriptor); break; default: throw new IllegalArgumentException(); } } @Override public void visitInvokeDynamicInsn( final String name, final String descriptor, final Handle bootstrapMethodHandle, final Object... bootstrapMethodArguments) { invokedynamic(name, descriptor, bootstrapMethodHandle, bootstrapMethodArguments); } @Override public void visitJumpInsn(final int opcode, final Label label) { switch (opcode) { case Opcodes.IFEQ: ifeq(label); break; case Opcodes.IFNE: ifne(label); break; case Opcodes.IFLT: iflt(label); break; case Opcodes.IFGE: ifge(label); break; case Opcodes.IFGT: ifgt(label); break; case Opcodes.IFLE: ifle(label); break; case Opcodes.IF_ICMPEQ: ificmpeq(label); break; case Opcodes.IF_ICMPNE: ificmpne(label); break; case Opcodes.IF_ICMPLT: ificmplt(label); break; case Opcodes.IF_ICMPGE: ificmpge(label); break; case Opcodes.IF_ICMPGT: ificmpgt(label); break; case Opcodes.IF_ICMPLE: ificmple(label); break; case Opcodes.IF_ACMPEQ: ifacmpeq(label); break; case Opcodes.IF_ACMPNE: ifacmpne(label); break; case Opcodes.GOTO: goTo(label); break; case Opcodes.JSR: jsr(label); break; case Opcodes.IFNULL: ifnull(label); break; case Opcodes.IFNONNULL: ifnonnull(label); break; default: throw new IllegalArgumentException(); } } @Override public void visitLabel(final Label label) { mark(label); } @Override public void visitLdcInsn(final Object value) { if (api < 
Opcodes.ASM5 && (value instanceof Handle || (value instanceof Type && ((Type) value).getSort() == Type.METHOD))) { throw new UnsupportedOperationException("This feature requires ASM5"); } if (api < Opcodes.ASM7 && value instanceof ConstantDynamic) { throw new UnsupportedOperationException("This feature requires ASM7"); } if (value instanceof Integer) { iconst((Integer) value); } else if (value instanceof Byte) { iconst(((Byte) value).intValue()); } else if (value instanceof Character) { iconst(((Character) value).charValue()); } else if (value instanceof Short) { iconst(((Short) value).intValue()); } else if (value instanceof Boolean) { iconst(((Boolean) value).booleanValue() ? 1 : 0); } else if (value instanceof Float) { fconst((Float) value); } else if (value instanceof Long) { lconst((Long) value); } else if (value instanceof Double) { dconst((Double) value); } else if (value instanceof String) { aconst(value); } else if (value instanceof Type) { tconst((Type) value); } else if (value instanceof Handle) { hconst((Handle) value); } else if (value instanceof ConstantDynamic) { cconst((ConstantDynamic) value); } else { throw new IllegalArgumentException(); } } @Override public void visitIincInsn(final int varIndex, final int increment) { iinc(varIndex, increment); } @Override public void visitTableSwitchInsn( final int min, final int max, final Label dflt, final Label... labels) { tableswitch(min, max, dflt, labels); } @Override public void visitLookupSwitchInsn(final Label dflt, final int[] keys, final Label[] labels) { lookupswitch(dflt, keys, labels); } @Override public void visitMultiANewArrayInsn(final String descriptor, final int numDimensions) { multianewarray(descriptor, numDimensions); } // ----------------------------------------------------------------------------------------------- /** Generates a nop instruction. */ public void nop() { mv.visitInsn(Opcodes.NOP); } /** * Generates the instruction to push the given value on the stack. 
* * @param value the constant to be pushed on the stack. This parameter must be an {@link Integer}, * a {@link Float}, a {@link Long}, a {@link Double}, a {@link String}, a {@link Type} of * OBJECT or ARRAY sort for {@code .class} constants, for classes whose version is 49, a * {@link Type} of METHOD sort for MethodType, a {@link Handle} for MethodHandle constants, * for classes whose version is 51 or a {@link ConstantDynamic} for a constant dynamic for * classes whose version is 55. */ public void aconst(final Object value) { if (value == null) { mv.visitInsn(Opcodes.ACONST_NULL); } else { mv.visitLdcInsn(value); } } /** * Generates the instruction to push the given value on the stack. * * @param intValue the constant to be pushed on the stack. */ public void iconst(final int intValue) { if (intValue >= -1 && intValue <= 5) { mv.visitInsn(Opcodes.ICONST_0 + intValue); } else if (intValue >= Byte.MIN_VALUE && intValue <= Byte.MAX_VALUE) { mv.visitIntInsn(Opcodes.BIPUSH, intValue); } else if (intValue >= Short.MIN_VALUE && intValue <= Short.MAX_VALUE) { mv.visitIntInsn(Opcodes.SIPUSH, intValue); } else { mv.visitLdcInsn(intValue); } } /** * Generates the instruction to push the given value on the stack. * * @param longValue the constant to be pushed on the stack. */ public void lconst(final long longValue) { if (longValue == 0L || longValue == 1L) { mv.visitInsn(Opcodes.LCONST_0 + (int) longValue); } else { mv.visitLdcInsn(longValue); } } /** * Generates the instruction to push the given value on the stack. * * @param floatValue the constant to be pushed on the stack. */ public void fconst(final float floatValue) { int bits = Float.floatToIntBits(floatValue); if (bits == 0L || bits == 0x3F800000 || bits == 0x40000000) { // 0..2 mv.visitInsn(Opcodes.FCONST_0 + (int) floatValue); } else { mv.visitLdcInsn(floatValue); } } /** * Generates the instruction to push the given value on the stack. * * @param doubleValue the constant to be pushed on the stack. 
*/ public void dconst(final double doubleValue) { long bits = Double.doubleToLongBits(doubleValue); if (bits == 0L || bits == 0x3FF0000000000000L) { // +0.0d and 1.0d mv.visitInsn(Opcodes.DCONST_0 + (int) doubleValue); } else { mv.visitLdcInsn(doubleValue); } } /** * Generates the instruction to push the given type on the stack. * * @param type the type to be pushed on the stack. */ public void tconst(final Type type) { mv.visitLdcInsn(type); } /** * Generates the instruction to push the given handle on the stack. * * @param handle the handle to be pushed on the stack. */ public void hconst(final Handle handle) { mv.visitLdcInsn(handle); } /** * Generates the instruction to push the given constant dynamic on the stack. * * @param constantDynamic the constant dynamic to be pushed on the stack. */ public void cconst(final ConstantDynamic constantDynamic) { mv.visitLdcInsn(constantDynamic); } public void load(final int varIndex, final Type type) { mv.visitVarInsn(type.getOpcode(Opcodes.ILOAD), varIndex); } public void aload(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IALOAD)); } public void store(final int varIndex, final Type type) { mv.visitVarInsn(type.getOpcode(Opcodes.ISTORE), varIndex); } public void astore(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IASTORE)); } public void pop() { mv.visitInsn(Opcodes.POP); } public void pop2() { mv.visitInsn(Opcodes.POP2); } public void dup() { mv.visitInsn(Opcodes.DUP); } public void dup2() { mv.visitInsn(Opcodes.DUP2); } public void dupX1() { mv.visitInsn(Opcodes.DUP_X1); } public void dupX2() { mv.visitInsn(Opcodes.DUP_X2); } public void dup2X1() { mv.visitInsn(Opcodes.DUP2_X1); } public void dup2X2() { mv.visitInsn(Opcodes.DUP2_X2); } public void swap() { mv.visitInsn(Opcodes.SWAP); } public void add(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IADD)); } public void sub(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.ISUB)); } public void mul(final Type type) { 
mv.visitInsn(type.getOpcode(Opcodes.IMUL)); } public void div(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IDIV)); } public void rem(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IREM)); } public void neg(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.INEG)); } public void shl(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.ISHL)); } public void shr(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.ISHR)); } public void ushr(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IUSHR)); } public void and(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IAND)); } public void or(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IOR)); } public void xor(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IXOR)); } public void iinc(final int varIndex, final int increment) { mv.visitIincInsn(varIndex, increment); } /** * Generates the instruction to cast from the first given type to the other. * * @param from a Type. * @param to a Type. */ public void cast(final Type from, final Type to) { cast(mv, from, to); } /** * Generates the instruction to cast from the first given type to the other. * * @param methodVisitor the method visitor to use to generate the instruction. * @param from a Type. * @param to a Type. 
*/ static void cast(final MethodVisitor methodVisitor, final Type from, final Type to) { if (from != to) { if (from == Type.DOUBLE_TYPE) { if (to == Type.FLOAT_TYPE) { methodVisitor.visitInsn(Opcodes.D2F); } else if (to == Type.LONG_TYPE) { methodVisitor.visitInsn(Opcodes.D2L); } else { methodVisitor.visitInsn(Opcodes.D2I); cast(methodVisitor, Type.INT_TYPE, to); } } else if (from == Type.FLOAT_TYPE) { if (to == Type.DOUBLE_TYPE) { methodVisitor.visitInsn(Opcodes.F2D); } else if (to == Type.LONG_TYPE) { methodVisitor.visitInsn(Opcodes.F2L); } else { methodVisitor.visitInsn(Opcodes.F2I); cast(methodVisitor, Type.INT_TYPE, to); } } else if (from == Type.LONG_TYPE) { if (to == Type.DOUBLE_TYPE) { methodVisitor.visitInsn(Opcodes.L2D); } else if (to == Type.FLOAT_TYPE) { methodVisitor.visitInsn(Opcodes.L2F); } else { methodVisitor.visitInsn(Opcodes.L2I); cast(methodVisitor, Type.INT_TYPE, to); } } else { if (to == Type.BYTE_TYPE) { methodVisitor.visitInsn(Opcodes.I2B); } else if (to == Type.CHAR_TYPE) { methodVisitor.visitInsn(Opcodes.I2C); } else if (to == Type.DOUBLE_TYPE) { methodVisitor.visitInsn(Opcodes.I2D); } else if (to == Type.FLOAT_TYPE) { methodVisitor.visitInsn(Opcodes.I2F); } else if (to == Type.LONG_TYPE) { methodVisitor.visitInsn(Opcodes.I2L); } else if (to == Type.SHORT_TYPE) { methodVisitor.visitInsn(Opcodes.I2S); } } } } public void lcmp() { mv.visitInsn(Opcodes.LCMP); } public void cmpl(final Type type) { mv.visitInsn(type == Type.FLOAT_TYPE ? Opcodes.FCMPL : Opcodes.DCMPL); } public void cmpg(final Type type) { mv.visitInsn(type == Type.FLOAT_TYPE ? 
Opcodes.FCMPG : Opcodes.DCMPG); } public void ifeq(final Label label) { mv.visitJumpInsn(Opcodes.IFEQ, label); } public void ifne(final Label label) { mv.visitJumpInsn(Opcodes.IFNE, label); } public void iflt(final Label label) { mv.visitJumpInsn(Opcodes.IFLT, label); } public void ifge(final Label label) { mv.visitJumpInsn(Opcodes.IFGE, label); } public void ifgt(final Label label) { mv.visitJumpInsn(Opcodes.IFGT, label); } public void ifle(final Label label) { mv.visitJumpInsn(Opcodes.IFLE, label); } public void ificmpeq(final Label label) { mv.visitJumpInsn(Opcodes.IF_ICMPEQ, label); } public void ificmpne(final Label label) { mv.visitJumpInsn(Opcodes.IF_ICMPNE, label); } public void ificmplt(final Label label) { mv.visitJumpInsn(Opcodes.IF_ICMPLT, label); } public void ificmpge(final Label label) { mv.visitJumpInsn(Opcodes.IF_ICMPGE, label); } public void ificmpgt(final Label label) { mv.visitJumpInsn(Opcodes.IF_ICMPGT, label); } public void ificmple(final Label label) { mv.visitJumpInsn(Opcodes.IF_ICMPLE, label); } public void ifacmpeq(final Label label) { mv.visitJumpInsn(Opcodes.IF_ACMPEQ, label); } public void ifacmpne(final Label label) { mv.visitJumpInsn(Opcodes.IF_ACMPNE, label); } public void goTo(final Label label) { mv.visitJumpInsn(Opcodes.GOTO, label); } public void jsr(final Label label) { mv.visitJumpInsn(Opcodes.JSR, label); } public void ret(final int varIndex) { mv.visitVarInsn(Opcodes.RET, varIndex); } public void tableswitch(final int min, final int max, final Label dflt, final Label... 
labels) { mv.visitTableSwitchInsn(min, max, dflt, labels); } public void lookupswitch(final Label dflt, final int[] keys, final Label[] labels) { mv.visitLookupSwitchInsn(dflt, keys, labels); } public void areturn(final Type type) { mv.visitInsn(type.getOpcode(Opcodes.IRETURN)); } public void getstatic(final String owner, final String name, final String descriptor) { mv.visitFieldInsn(Opcodes.GETSTATIC, owner, name, descriptor); } public void putstatic(final String owner, final String name, final String descriptor) { mv.visitFieldInsn(Opcodes.PUTSTATIC, owner, name, descriptor); } public void getfield(final String owner, final String name, final String descriptor) { mv.visitFieldInsn(Opcodes.GETFIELD, owner, name, descriptor); } public void putfield(final String owner, final String name, final String descriptor) { mv.visitFieldInsn(Opcodes.PUTFIELD, owner, name, descriptor); } /** * Deprecated. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @deprecated use {@link #invokevirtual(String, String, String, boolean)} instead. */ @Deprecated public void invokevirtual(final String owner, final String name, final String descriptor) { if (api >= Opcodes.ASM5) { invokevirtual(owner, name, descriptor, false); return; } mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, descriptor); } /** * Generates the instruction to call the given virtual method. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @param isInterface if the method's owner class is an interface. 
*/ public void invokevirtual( final String owner, final String name, final String descriptor, final boolean isInterface) { if (api < Opcodes.ASM5) { if (isInterface) { throw new UnsupportedOperationException("INVOKEVIRTUAL on interfaces require ASM 5"); } invokevirtual(owner, name, descriptor); return; } mv.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, descriptor, isInterface); } /** * Deprecated. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @deprecated use {@link #invokespecial(String, String, String, boolean)} instead. */ @Deprecated public void invokespecial(final String owner, final String name, final String descriptor) { if (api >= Opcodes.ASM5) { invokespecial(owner, name, descriptor, false); return; } mv.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, descriptor, false); } /** * Generates the instruction to call the given special method. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @param isInterface if the method's owner class is an interface. */ public void invokespecial( final String owner, final String name, final String descriptor, final boolean isInterface) { if (api < Opcodes.ASM5) { if (isInterface) { throw new UnsupportedOperationException("INVOKESPECIAL on interfaces require ASM 5"); } invokespecial(owner, name, descriptor); return; } mv.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, descriptor, isInterface); } /** * Deprecated. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @deprecated use {@link #invokestatic(String, String, String, boolean)} instead. 
*/ @Deprecated public void invokestatic(final String owner, final String name, final String descriptor) { if (api >= Opcodes.ASM5) { invokestatic(owner, name, descriptor, false); return; } mv.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, descriptor, false); } /** * Generates the instruction to call the given static method. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @param isInterface if the method's owner class is an interface. */ public void invokestatic( final String owner, final String name, final String descriptor, final boolean isInterface) { if (api < Opcodes.ASM5) { if (isInterface) { throw new UnsupportedOperationException("INVOKESTATIC on interfaces require ASM 5"); } invokestatic(owner, name, descriptor); return; } mv.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, descriptor, isInterface); } /** * Generates the instruction to call the given interface method. * * @param owner the internal name of the method's owner class (see {@link * Type#getInternalName()}). * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). */ public void invokeinterface(final String owner, final String name, final String descriptor) { mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, descriptor, true); } /** * Generates the instruction to call the given dynamic method. * * @param name the method's name. * @param descriptor the method's descriptor (see {@link Type}). * @param bootstrapMethodHandle the bootstrap method. * @param bootstrapMethodArguments the bootstrap method constant arguments. Each argument must be * an {@link Integer}, {@link Float}, {@link Long}, {@link Double}, {@link String}, {@link * Type}, {@link Handle} or {@link ConstantDynamic} value. 
This method is allowed to modify * the content of the array so a caller should expect that this array may change. */ public void invokedynamic( final String name, final String descriptor, final Handle bootstrapMethodHandle, final Object[] bootstrapMethodArguments) { mv.visitInvokeDynamicInsn(name, descriptor, bootstrapMethodHandle, bootstrapMethodArguments); } public void anew(final Type type) { mv.visitTypeInsn(Opcodes.NEW, type.getInternalName()); } /** * Generates the instruction to create and push on the stack an array of the given type. * * @param type an array Type. */ public void newarray(final Type type) { newarray(mv, type); } /** * Generates the instruction to create and push on the stack an array of the given type. * * @param methodVisitor the method visitor to use to generate the instruction. * @param type an array Type. */ static void newarray(final MethodVisitor methodVisitor, final Type type) { int arrayType; switch (type.getSort()) { case Type.BOOLEAN: arrayType = Opcodes.T_BOOLEAN; break; case Type.CHAR: arrayType = Opcodes.T_CHAR; break; case Type.BYTE: arrayType = Opcodes.T_BYTE; break; case Type.SHORT: arrayType = Opcodes.T_SHORT; break; case Type.INT: arrayType = Opcodes.T_INT; break; case Type.FLOAT: arrayType = Opcodes.T_FLOAT; break; case Type.LONG: arrayType = Opcodes.T_LONG; break; case Type.DOUBLE: arrayType = Opcodes.T_DOUBLE; break; default: methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, type.getInternalName()); return; } methodVisitor.visitIntInsn(Opcodes.NEWARRAY, arrayType); } public void arraylength() { mv.visitInsn(Opcodes.ARRAYLENGTH); } public void athrow() { mv.visitInsn(Opcodes.ATHROW); } public void checkcast(final Type type) { mv.visitTypeInsn(Opcodes.CHECKCAST, type.getInternalName()); } public void instanceOf(final Type type) { mv.visitTypeInsn(Opcodes.INSTANCEOF, type.getInternalName()); } public void monitorenter() { mv.visitInsn(Opcodes.MONITORENTER); } public void monitorexit() { mv.visitInsn(Opcodes.MONITOREXIT); } 
public void multianewarray(final String descriptor, final int numDimensions) { mv.visitMultiANewArrayInsn(descriptor, numDimensions); } public void ifnull(final Label label) { mv.visitJumpInsn(Opcodes.IFNULL, label); } public void ifnonnull(final Label label) { mv.visitJumpInsn(Opcodes.IFNONNULL, label); } public void mark(final Label label) { mv.visitLabel(label); } }
googleapis/google-cloud-java
37,805
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/ListEntityTypesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/entity_type.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3; /** * * * <pre> * The request message for * [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.ListEntityTypesRequest} */ public final class ListEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) ListEntityTypesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListEntityTypesRequest.newBuilder() to construct. 
private ListEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEntityTypesRequest() { parent_ = ""; languageCode_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEntityTypesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.class, com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LANGUAGE_CODE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object languageCode_ = ""; /** * * * <pre> * The language to list entity types for. The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 2;</code> * * @return The languageCode. */ @java.lang.Override public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * * * <pre> * The language to list entity types for. The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. 
* </pre> * * <code>string language_code = 2;</code> * * @return The bytes for languageCode. */ @java.lang.Override public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * The maximum number of items to return in a single page. By default 100 and * at most 1000. * </pre> * * <code>int32 page_size = 3;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest other = (com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getLanguageCode().equals(other.getLanguageCode())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest 
parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3.EntityTypes.ListEntityTypes]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.ListEntityTypesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.class, com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; languageCode_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.EntityTypeProto .internal_static_google_cloud_dialogflow_cx_v3_ListEntityTypesRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest build() { com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest buildPartial() { com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest result = new com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.languageCode_ = languageCode_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) 
other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest other) { if (other == com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getLanguageCode().isEmpty()) { languageCode_ = other.languageCode_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { languageCode_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. 
The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The agent to list all entity types for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object languageCode_ = ""; /** * * * <pre> * The language to list entity types for. The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 2;</code> * * @return The languageCode. */ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The language to list entity types for. 
The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 2;</code> * * @return The bytes for languageCode. */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The language to list entity types for. The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 2;</code> * * @param value The languageCode to set. * @return This builder for chaining. */ public Builder setLanguageCode(java.lang.String value) { if (value == null) { throw new NullPointerException(); } languageCode_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The language to list entity types for. The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. 
* Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 2;</code> * * @return This builder for chaining. */ public Builder clearLanguageCode() { languageCode_ = getDefaultInstance().getLanguageCode(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The language to list entity types for. The following fields are language * dependent: * * * `EntityType.entities.value` * * `EntityType.entities.synonyms` * * `EntityType.excluded_phrases.value` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 2;</code> * * @param value The bytes for languageCode to set. * @return This builder for chaining. */ public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); languageCode_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of items to return in a single page. By default 100 and * at most 1000. * </pre> * * <code>int32 page_size = 3;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of items to return in a single page. By default 100 and * at most 1000. * </pre> * * <code>int32 page_size = 3;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The maximum number of items to return in a single page. By default 100 and * at most 1000. * </pre> * * <code>int32 page_size = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The next_page_token value returned from a previous list request. * </pre> * * <code>string page_token = 4;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The next_page_token value returned from a previous list request. 
* </pre> * * <code>string page_token = 4;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.ListEntityTypesRequest) private static final com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest(); } public static com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEntityTypesRequest> PARSER = new com.google.protobuf.AbstractParser<ListEntityTypesRequest>() { @java.lang.Override public ListEntityTypesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListEntityTypesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEntityTypesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.ListEntityTypesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/james-project
38,216
mailbox/cassandra/src/test/java/org/apache/james/mailbox/cassandra/mail/CassandraMessageIdToImapUidDAOTest.java
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.mailbox.cassandra.mail; import static org.assertj.core.api.Assertions.assertThat; import java.util.Date; import java.util.List; import java.util.Optional; import jakarta.mail.Flags; import org.apache.james.backends.cassandra.CassandraCluster; import org.apache.james.backends.cassandra.CassandraClusterExtension; import org.apache.james.backends.cassandra.components.CassandraDataDefinition; import org.apache.james.backends.cassandra.init.configuration.CassandraConfiguration; import org.apache.james.backends.cassandra.versions.CassandraSchemaVersionDataDefinition; import org.apache.james.blob.api.PlainBlobId; import org.apache.james.mailbox.MessageUid; import org.apache.james.mailbox.ModSeq; import org.apache.james.mailbox.cassandra.ids.CassandraId; import org.apache.james.mailbox.cassandra.ids.CassandraMessageId; import org.apache.james.mailbox.cassandra.modules.CassandraMessageDataDefinition; import org.apache.james.mailbox.model.ComposedMessageId; import org.apache.james.mailbox.model.ComposedMessageIdWithMetaData; 
import org.apache.james.mailbox.model.ThreadId; import org.apache.james.mailbox.model.UpdatedFlags; import org.assertj.core.api.SoftAssertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import com.datastax.oss.driver.api.core.uuid.Uuids; import reactor.core.publisher.Flux; class CassandraMessageIdToImapUidDAOTest { private static final PlainBlobId HEADER_BLOB_ID_1 = new PlainBlobId.Factory().of("abc"); private static final CassandraDataDefinition MODULE = CassandraDataDefinition.aggregateModules( CassandraSchemaVersionDataDefinition.MODULE, CassandraMessageDataDefinition.MODULE); @RegisterExtension static CassandraClusterExtension cassandraCluster = new CassandraClusterExtension(MODULE); private CassandraMessageIdToImapUidDAO testee; @BeforeEach void setUp(CassandraCluster cassandra) { CassandraMessageId.Factory messageIdFactory = new CassandraMessageId.Factory(); testee = new CassandraMessageIdToImapUidDAO( cassandra.getConf(), new PlainBlobId.Factory(), CassandraConfiguration.DEFAULT_CONFIGURATION); } @Test void deleteShouldNotThrowWhenRowDoesntExist() { testee.delete(CassandraMessageId.Factory.of(Uuids.timeBased()), CassandraId.timeBased()) .block(); } @Test void deleteShouldDeleteWhenRowExists() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); testee.delete(messageId, mailboxId).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, 
Optional.of(mailboxId)).collectList().block(); assertThat(messages).isEmpty(); } @Test void deleteShouldDeleteOnlyConcernedRowWhenMultipleRowExists() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); CassandraId mailboxId2 = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); MessageUid messageUid2 = MessageUid.of(2); Flux.merge( testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()), testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId2, messageId, messageUid2)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build())) .blockLast(); testee.delete(messageId, mailboxId).block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId2, messageId, messageUid2)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void insertShouldWork() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); 
testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void shouldHandleNullSaveDateWell() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .saveDate(Optional.empty()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages.get(0).getSaveDate()).isEmpty(); } @Test void shouldHandleSaveDateWell() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); Optional<Date> saveDate = Optional.of(new Date()); 
testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .saveDate(saveDate) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages.get(0).getSaveDate()).isEqualTo(saveDate); } @Test void updateShouldReturnTrueWhenOldModSeqMatches() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageIdWithMetaData composedMessageIdWithMetaData = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); testee.insert(CassandraMessageMetadata.builder() .ids(composedMessageIdWithMetaData) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(new Flags(Flags.Flag.ANSWERED)) .build(); Boolean result = testee.updateMetadata(composedMessageIdWithMetaData.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); assertThat(result).isTrue(); } @Test void updateShouldReturnFalseWhenOldModSeqDoesntMatch() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageIdWithMetaData composedMessageIdWithFlags = ComposedMessageIdWithMetaData.builder() 
.composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); testee.insert(CassandraMessageMetadata.builder() .ids(composedMessageIdWithFlags) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(new Flags(Flags.Flag.ANSWERED)) .build(); Boolean result = testee.updateMetadata(composedMessageIdWithFlags.getComposedMessageId(), updatedFlags, ModSeq.of(3)).block(); assertThat(result).isFalse(); } @Test void updateShouldUpdateModSeq() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.ANSWERED)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); 
List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateAnsweredFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.ANSWERED)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(new Flags(Flags.Flag.ANSWERED)) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateDeletedFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, 
messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.DELETED)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateDraftFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new 
Flags(Flags.Flag.DRAFT)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateFlaggedFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.FLAGGED)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) 
.extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateRecentFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.RECENT)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateSeenFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() 
.composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.SEEN)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateUserFlag() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags(Flags.Flag.USER)) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = 
UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void updateShouldUpdateUserFlags() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); Flags flags = new Flags(); flags.add("myCustomFlag"); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(flags) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags()) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } 
@Test void updateShouldRemoveUserFlags() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageId composedMessageId = new ComposedMessageId(mailboxId, messageId, messageUid); Flags flags = new Flags(); flags.add("myCustomFlag"); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(flags) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(composedMessageId) .flags(new Flags()) .modSeq(ModSeq.of(2)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); UpdatedFlags updatedFlags = UpdatedFlags.builder() .uid(messageUid) .messageId(messageId) .modSeq(ModSeq.of(2)) .oldFlags(new Flags("myCustomFlag")) .newFlags(expectedComposedMessageId.getFlags()) .build(); testee.updateMetadata(expectedComposedMessageId.getComposedMessageId(), updatedFlags, ModSeq.of(1)).block(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void retrieveShouldReturnOneMessageWhenKeyMatches() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) 
.build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()) .block(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); } @Test void retrieveShouldReturnMultipleMessagesWhenMessageIdMatches() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); CassandraId mailboxId2 = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); MessageUid messageUid2 = MessageUid.of(2); Flux.merge( testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build()), testee.insert(CassandraMessageMetadata.builder() .ids(ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId2, messageId, messageUid2)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build()) .internalDate(new Date()) .bodyStartOctet(18L) .size(36L) .headerContent(Optional.of(HEADER_BLOB_ID_1)) .build())) .blockLast(); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) 
.threadId(ThreadId.fromBaseMessageId(messageId)) .build(); ComposedMessageIdWithMetaData expectedComposedMessageId2 = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId2, messageId, messageUid2)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId, expectedComposedMessageId2); } @Test void retrieveMessageShouldHandlePossibleNullInternalDate() { CassandraMessageId messageId = CassandraMessageId.Factory.of(Uuids.timeBased()); CassandraId mailboxId = CassandraId.timeBased(); MessageUid messageUid = MessageUid.of(1); ComposedMessageIdWithMetaData expectedComposedMessageId = ComposedMessageIdWithMetaData.builder() .composedMessageId(new ComposedMessageId(mailboxId, messageId, messageUid)) .flags(new Flags()) .modSeq(ModSeq.of(1)) .threadId(ThreadId.fromBaseMessageId(messageId)) .build(); testee.insertNullInternalDateAndHeaderContent(CassandraMessageMetadata.builder() .ids(expectedComposedMessageId) .build()) .block(); SoftAssertions.assertSoftly(softAssertions -> { softAssertions.assertThatCode(() -> testee.retrieveAllMessages().collectList().block()) .doesNotThrowAnyException(); List<CassandraMessageMetadata> messages = testee.retrieve(messageId, Optional.empty()).collectList().block(); softAssertions.assertThat(messages) .extracting(CassandraMessageMetadata::getComposedMessageId) .containsOnly(expectedComposedMessageId); softAssertions.assertThat(messages) .extracting(CassandraMessageMetadata::getInternalDate) .containsOnly(Optional.empty()); }); } }
apache/myfaces
38,106
impl/src/main/java/org/apache/myfaces/view/facelets/tag/faces/ComponentTagHandlerDelegate.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myfaces.view.facelets.tag.faces; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import jakarta.el.ValueExpression; import jakarta.faces.FacesWrapper; import jakarta.faces.application.Application; import jakarta.faces.application.ProjectStage; import jakarta.faces.component.ActionSource; import jakarta.faces.component.EditableValueHolder; import jakarta.faces.component.UIComponent; import jakarta.faces.component.UIOutput; import jakarta.faces.component.UniqueIdVendor; import jakarta.faces.component.ValueHolder; import jakarta.faces.component.behavior.ClientBehaviorHolder; import jakarta.faces.component.visit.VisitCallback; import jakarta.faces.component.visit.VisitContext; import jakarta.faces.component.visit.VisitResult; import jakarta.faces.context.FacesContext; import jakarta.faces.event.PhaseId; import jakarta.faces.validator.BeanValidator; import jakarta.faces.validator.Validator; import jakarta.faces.view.EditableValueHolderAttachedObjectHandler; import jakarta.faces.view.facelets.ComponentConfig; import jakarta.faces.view.facelets.ComponentHandler; import 
jakarta.faces.view.facelets.FaceletContext; import jakarta.faces.view.facelets.MetaRuleset; import jakarta.faces.view.facelets.TagAttribute; import jakarta.faces.view.facelets.TagException; import jakarta.faces.view.facelets.TagHandlerDelegate; import jakarta.faces.view.facelets.ValidatorHandler; import org.apache.myfaces.application.NavigationHandlerImpl; import org.apache.myfaces.renderkit.html.util.ResourceUtils; import org.apache.myfaces.util.ExternalSpecifications; import org.apache.myfaces.component.visit.MyFacesVisitHints; import org.apache.myfaces.view.facelets.AbstractFaceletContext; import org.apache.myfaces.view.facelets.ComponentState; import org.apache.myfaces.view.facelets.PartialStateManagementStrategy; import org.apache.myfaces.view.facelets.FaceletCompositionContext; import org.apache.myfaces.view.facelets.FaceletDynamicComponentRefreshTransientBuildEvent; import org.apache.myfaces.view.facelets.FaceletViewDeclarationLanguage; import org.apache.myfaces.view.facelets.FaceletViewDeclarationLanguageBase; import org.apache.myfaces.view.facelets.el.CompositeComponentELUtils; import org.apache.myfaces.view.facelets.tag.MetaRulesetImpl; import org.apache.myfaces.view.facelets.tag.faces.core.AjaxHandler; import org.apache.myfaces.view.facelets.tag.faces.core.FacetHandler; /** * * Implementation of the tag logic used in the Faces specification. 
 *
 * See org.apache.myfaces.view.facelets.tag.faces.ComponentHandler
 * @author Leonardo Uribe (latest modification by $Author$)
 * @version $Revision$ $Date$
 *
 * @since 2.0
 */
public class ComponentTagHandlerDelegate extends TagHandlerDelegate
{
    private final static Logger log = Logger.getLogger(ComponentTagHandlerDelegate.class.getName());

    // The ComponentHandler this delegate does the real work for.
    private final ComponentHandler _delegate;
    // Component/renderer type taken from the handler's ComponentConfig at construction time.
    private final String _componentType;
    // The "id" tag attribute, or null if the page author did not set one.
    private final TagAttribute _id;
    private final String _rendererType;
    // Non-null when _delegate (or a handler it wraps) builds its own component instance,
    // e.g. composite components that need Application.createComponent(FacesContext, Resource).
    private final ComponentBuilderHandler _componentBuilderHandlerDelegate;
    // Non-null when the component may be relocated in the tree (h:outputScript/h:outputStylesheet
    // style resources); used to find the component again after relocation on refresh.
    private final RelocatableResourceHandler _relocatableResourceHandler;

    @SuppressWarnings("unchecked")
    public ComponentTagHandlerDelegate(ComponentHandler delegate)
    {
        _delegate = delegate;
        ComponentConfig delegateComponentConfig = delegate.getComponentConfig();
        _componentType = delegateComponentConfig.getComponentType();
        _rendererType = delegateComponentConfig.getRendererType();
        _id = delegate.getTagAttribute("id");

        // Walk the FacesWrapper chain looking for a handler that implements
        // ComponentBuilderHandler; stop at the first match or when the chain ends.
        ComponentHandler handler = _delegate;
        boolean found = false;
        while (handler != null && !found)
        {
            if (handler instanceof ComponentBuilderHandler)
            {
                found = true;
            }
            else if (handler instanceof FacesWrapper wrapper)
            {
                handler = (ComponentHandler) wrapper.getWrapped();
            }
            else
            {
                handler = null;
            }
        }
        if (found)
        {
            _componentBuilderHandlerDelegate = (ComponentBuilderHandler) handler;
        }
        else
        {
            _componentBuilderHandlerDelegate = null;
        }

        //Check if this component is instance of RelocatableResourceHandler
        // (same wrapper-chain walk as above, but for RelocatableResourceHandler).
        handler = _delegate;
        found = false;
        while (handler != null && !found)
        {
            if (handler instanceof RelocatableResourceHandler)
            {
                found = true;
            }
            else if (handler instanceof FacesWrapper wrapper)
            {
                handler = (ComponentHandler) wrapper.getWrapped();
            }
            else
            {
                handler = null;
            }
        }
        if (found)
        {
            _relocatableResourceHandler = (RelocatableResourceHandler) handler;
        }
        else
        {
            // Check if the component is a relocatable component done overriding the tag handler
            // (a UIOutput with the default script/stylesheet renderer counts as relocatable even
            // without a dedicated handler).
            if (_componentType != null && _rendererType != null &&
                (_rendererType.equals(ResourceUtils.DEFAULT_SCRIPT_RENDERER_TYPE) ||
                 _rendererType.equals(ResourceUtils.DEFAULT_STYLESHEET_RENDERER_TYPE)) &&
                _componentType.equals(UIOutput.COMPONENT_TYPE))
            {
                _relocatableResourceHandler = ComponentRelocatableResourceHandler.INSTANCE;
            }
            else
            {
                _relocatableResourceHandler = null;
            }
        }
    }

    /**
     * Method handles UIComponent tree creation in accordance with the Faces 1.2 spec.
     * <ol>
     * <li>First determines this UIComponent's id by calling {@link #getId(FaceletContext) getId(FaceletContext)}.</li>
     * <li>Search the parent for an existing UIComponent of the id we just grabbed</li>
     * <li>If found, {@link FaceletCompositionContext#markForDeletion(UIComponent) mark} its children for deletion.</li>
     * <li>If <i>not</i> found, call {@link #createComponent(FaceletContext) createComponent}.
     * <ol>
     * <li>Only here do we apply TagHandler#setAttributes(FaceletCompositionContext, Object)</li>
     * <li>Set the UIComponent's id</li>
     * <li>Set the RendererType of this instance</li>
     * </ol>
     * </li>
     * <li>Now apply the nextHandler, passing the UIComponent we've created/found.</li>
     * <li>Now add the UIComponent to the passed parent</li>
     * <li>Lastly, if the UIComponent already existed (found),
     * then #finalizeForDeletion(FaceletCompositionContext, UIComponent)
     * for deletion.</li>
     * </ol>
     *
     * See jakarta.faces.view.facelets.FaceletHandler#apply(jakarta.faces.view.facelets.FaceletContext,
     * jakarta.faces.component.UIComponent)
     *
     * @throws TagException
     *             if the UIComponent parent is null
     */
    @SuppressWarnings("unchecked")
    @Override
    public void apply(FaceletContext ctx, UIComponent parent) throws IOException
    {
        // make sure our parent is not null
        if (parent == null)
        {
            throw new TagException(_delegate.getTag(), "Parent UIComponent was null");
        }

        FacesContext facesContext = ctx.getFacesContext();

        // possible facet scoped
        String facetName = this.getFacetName(ctx, parent);

        // our id (derived from the tag id, not from the "id" attribute; used to mark/find
        // the component built by this particular tag occurrence)
        String id = ctx.generateUniqueId(_delegate.getTagId());

        // Cast to use UniqueIdVendor stuff
        FaceletCompositionContext mctx = FaceletCompositionContext.getCurrentInstance(ctx);

        // grab our component
        UIComponent c = null;
        //Used to preserve the original parent. Note when the view is being refreshed, the real parent could be
        //another component.
        UIComponent oldParent = parent;

        // On a refresh, try to re-find the component created by this tag in a previous build
        // instead of creating a new one.
        if (mctx.isRefreshingSection())
        {
            if (_relocatableResourceHandler != null)
            {
                c = _relocatableResourceHandler.findChildByTagId(ctx, parent, id);
            }
            else
            {
                if (facetName != null)
                {
                    c = ComponentSupport.findChildInFacetByTagId(parent, id, facetName);
                }
                else
                {
                    c = ComponentSupport.findChildInChildrenByTagId(parent, id);
                }
            }
        }
        boolean componentFound = false;
        if (c != null)
        {
            componentFound = true;
            // Check if the binding needs dynamic refresh and if that so, invoke the refresh from this location, to
            // preserve the same context
            if (_delegate.getBinding() != null &&
                c.getAttributes().containsKey(
                    FaceletDynamicComponentRefreshTransientBuildEvent.DYNAMIC_COMPONENT_BINDING_NEEDS_REFRESH))
            {
                VisitContext visitContext = mctx.getVisitContextFactory().
                    getVisitContext(facesContext, null, MyFacesVisitHints.SET_SKIP_ITERATION);
                c.visitTree(visitContext, PublishFaceletDynamicComponentRefreshTransientBuildCallback.INSTANCE);
            }

            mctx.incrementUniqueComponentId();

            // mark all children for cleaning
            if (log.isLoggable(Level.FINE))
            {
                log.fine(_delegate.getTag() + " Component[" + id + "] Found, marking children for cleanup");
            }

            // The call for mctx.markForDeletion(c) is always necessary, because
            // component resource relocation occur as an effect of PostAddToViewEvent,
            // so at this point it is unknown if the component was relocated or not.
            mctx.markForDeletion(c);
            if (_relocatableResourceHandler != null)
            {
                mctx.markRelocatableResourceForDeletion(c);
            }
        }
        else
        {
            // Not found (or initial build): create a brand new component instance.
            c = this.createComponent(ctx);
            if (log.isLoggable(Level.FINE))
            {
                log.fine(_delegate.getTag() + " Component[" + id + "] Created: " + c.getClass().getName());
            }

            _delegate.setAttributes(ctx, c);

            // mark it owned by a facelet instance
            c.getAttributes().put(ComponentSupport.MARK_CREATED, id);

            if (facesContext.isProjectStage(ProjectStage.Development))
            {
                c.getAttributes().put(UIComponent.VIEW_LOCATION_KEY, _delegate.getTag().getLocation());
            }

            // assign our unique id
            if (this._id != null)
            {
                mctx.incrementUniqueComponentId();
                c.setId(this._id.getValue(ctx));
            }
            else
            {
                String componentId = mctx.generateUniqueComponentId();
                UniqueIdVendor uniqueIdVendor = mctx.getUniqueIdVendorFromStack();
                if (uniqueIdVendor == null)
                {
                    uniqueIdVendor = facesContext.getViewRoot();

                    if (uniqueIdVendor == null)
                    {
                        // facesContext.getViewRoot() returns null here if we are in
                        // phase restore view, so we have to try to get the view root
                        // via the method in ComponentSupport and our parent
                        uniqueIdVendor = ComponentSupport.getViewRoot(ctx, parent);
                    }
                }
                if (uniqueIdVendor != null)
                {
                    // UIViewRoot implements UniqueIdVendor, so there is no need to cast to UIViewRoot
                    // and call createUniqueId()
                    String uid = uniqueIdVendor.createUniqueId(facesContext, componentId);
                    c.setId(uid);
                }
            }

            if (this._rendererType != null)
            {
                c.setRendererType(this._rendererType);
            }

            // hook method
            _delegate.onComponentCreated(ctx, c, parent);

            if (_relocatableResourceHandler != null &&
                _relocatableResourceHandler instanceof ComponentRelocatableResourceHandler)
            {
                // Propagate the composite component location so EL resolution keeps working
                // after the resource component is relocated out of the composite.
                UIComponent parentCompositeComponent = mctx.getCompositeComponentFromStack();
                if (parentCompositeComponent != null)
                {
                    c.getAttributes().put(CompositeComponentELUtils.LOCATION_KEY,
                        parentCompositeComponent.getAttributes().get(CompositeComponentELUtils.LOCATION_KEY));
                }
            }

            if (mctx.isRefreshingTransientBuild() && _relocatableResourceHandler != null)
            {
                mctx.markRelocatableResourceForDeletion(c);
            }
        }
        c.pushComponentToEL(facesContext, c);

        if (c instanceof UniqueIdVendor vendor)
        {
            mctx.pushUniqueIdVendorToStack(vendor);
        }

        if (mctx.isDynamicComponentTopLevel())
        {
            // For the top-level component of a dynamic (programmatic) build, clear the flag
            // while populating children so nested handlers run in "normal" mode.
            mctx.setDynamicComponentTopLevel(false);
            _delegate.applyNextHandler(ctx, c);
            mctx.setDynamicComponentTopLevel(true);
        }
        else
        {
            // first allow c to get populated
            _delegate.applyNextHandler(ctx, c);
        }

        boolean oldProcessingEvents = facesContext.isProcessingEvents();
        // finish cleaning up orphaned children
        if (componentFound && !mctx.isDynamicComponentTopLevel())
        {
            mctx.finalizeForDeletion(c);

            if (mctx.isRefreshingSection())
            {
                // Temporarily disable event processing while the component is detached/reattached,
                // so remove/add below does not fire Pre/PostAddToView listeners again.
                facesContext.setProcessingEvents(false);
                if (_relocatableResourceHandler != null &&
                    parent != null && !parent.equals(c.getParent()))
                {
                    // Replace parent with the relocated parent.
                    parent = c.getParent();
                    // Since we changed the parent, the facetName becomes invalid, because it points
                    // to the component before relocation. We need to find the right facetName (if any) so we can
                    // refresh the component properly.
                    UIComponent c1 = ComponentSupport.findChildInChildrenByTagId(parent, id);
                    if (c1 == null)
                    {
                        facetName = ComponentSupport.findChildInFacetsByTagId(parent, id);
                    }
                    else
                    {
                        facetName = null;
                    }
                }
                ComponentSupport.setCachedFacesContext(c, facesContext);
            }
            if (facetName == null)
            {
                parent.getChildren().remove(c);
            }
            else
            {
                ComponentSupport.removeFacet(ctx, parent, c, facetName);
            }
            if (mctx.isRefreshingSection())
            {
                ComponentSupport.setCachedFacesContext(c, null);
                facesContext.setProcessingEvents(oldProcessingEvents);
            }
        }

        if (!componentFound)
        {
            // Attach enclosing f:ajax behaviors and default/enclosing validators only on
            // first creation, never on refresh (they are already attached then).
            if (c instanceof ClientBehaviorHolder && !UIComponent.isCompositeComponent(c))
            {
                Iterator<AjaxHandler> it = ((AbstractFaceletContext) ctx).getAjaxHandlers();
                if (it != null)
                {
                    while (it.hasNext())
                    {
                        it.next().applyAttachedObject(facesContext, c);
                    }
                }
            }

            if (c instanceof EditableValueHolder holder)
            {
                // add default validators here, because this feature
                // is only available in facelets (see MYFACES-2362 for details)
                addEnclosingAndDefaultValidators(ctx, mctx, facesContext, holder);
            }
        }

        _delegate.onComponentPopulated(ctx, c, oldParent);

        // Re-attach the component to its (possibly relocated) parent.
        if (!mctx.isDynamicComponentTopLevel() || !componentFound)
        {
            if (componentFound && mctx.isRefreshingSection())
            {
                facesContext.setProcessingEvents(false);
                ComponentSupport.setCachedFacesContext(c, facesContext);
            }
            if (facetName == null)
            {
                parent.getChildren().add(c);
            }
            else
            {
                ComponentSupport.addFacet(ctx, parent, c, facetName);
            }
            if (componentFound && mctx.isRefreshingSection())
            {
                ComponentSupport.setCachedFacesContext(c, null);
                facesContext.setProcessingEvents(oldProcessingEvents);
            }
        }

        if (c instanceof UniqueIdVendor)
        {
            mctx.popUniqueIdVendorToStack();
        }

        c.popComponentFromEL(facesContext);

        if (mctx.isMarkInitialState())
        {
            //Call it only if we are using partial state saving
            c.markInitialState();
        }
    }

    /**
     * Return the Facet name we are scoped in, otherwise null
     *
     * @param ctx the current FaceletContext (unused here, kept for the handler contract)
     * @param parent the parent component whose attributes carry the facet scope marker
     * @return the facet name set by an enclosing f:facet handler, or null
     */
    protected final String getFacetName(FaceletContext ctx, UIComponent parent)
    {
        return (String) parent.getAttributes().get(FacetHandler.KEY);
    }

    /**
     * If the binding attribute was specified, use that in conjunction with our componentType String variable to call
     * createComponent on the Application, otherwise just pass the componentType String. <p> If the binding was used,
     * then set the ValueExpression "binding" on the created UIComponent.</p>
     *
     * See
     * Application#createComponent(jakarta.faces.el.ValueBinding, jakarta.faces.context.FacesContext, java.lang.String)
     * See Application#createComponent(java.lang.String)
     * @param ctx
     *            FaceletContext to use in creating a component
     * @return the newly created component (may be null if the binding-based factory returns null)
     */
    protected UIComponent createComponent(FaceletContext ctx)
    {
        if (_componentBuilderHandlerDelegate != null)
        {
            // the call to Application.createComponent(FacesContext, Resource)
            // is delegated because we don't have here the required Resource instance
            return _componentBuilderHandlerDelegate.createComponent(ctx);
        }
        UIComponent c = null;
        FacesContext faces = ctx.getFacesContext();
        Application app = faces.getApplication();
        if (_delegate.getBinding() != null)
        {
            ValueExpression ve = _delegate.getBinding().getValueExpression(ctx, Object.class);
            if (PhaseId.RESTORE_VIEW.equals(faces.getCurrentPhaseId()))
            {
                if (!ve.isReadOnly(faces.getELContext()))
                {
                    try
                    {
                        // force reset it is an easy and cheap way to allow "binding" attribute to work on
                        // view scope beans or flow scope beans (using a transient variable)
                        ve.setValue(faces.getELContext(), null);
                    }
                    catch (Exception e)
                    {
                        // ignore: best-effort reset; a read-only or failing expression must not
                        // abort component creation
                    }
                }
            }
            if (this._rendererType == null)
            {
                c = app.createComponent(ve, faces, this._componentType);
            }
            else
            {
                c = app.createComponent(ve, faces, this._componentType, this._rendererType);
            }
            if (c != null)
            {
                c.setValueExpression("binding", ve);

                if (!ve.isReadOnly(faces.getELContext()))
                {
                    // Make sure the binding is cleared when the view is disposed, so the bean
                    // does not keep a stale component reference across views.
                    ComponentSupport.getViewRoot(ctx, c).getAttributes().put(
                        NavigationHandlerImpl.CALL_PRE_DISPOSE_VIEW, Boolean.TRUE);
                    c.subscribeToEvent(PreDisposeViewEvent.class, new ClearBindingValueExpressionListener());
                }

                if (c.getChildCount() > 0 || c.getFacetCount() > 0)
                {
                    // In this case, this component is used to hold a subtree that is generated
                    // dynamically. In this case, the best is mark this component to be restored
                    // fully, because this ensures the state is correctly preserved. Note this
                    // is only necessary when the component has additional children or facets,
                    // because those components requires an unique id provided by createUniqueId(),
                    // and this ensures stability of the generated ids.
                    c.getAttributes().put(PartialStateManagementStrategy.COMPONENT_ADDED_AFTER_BUILD_VIEW,
                                          ComponentState.REMOVE_ADD);

                    if (FaceletViewDeclarationLanguageBase.isDynamicComponentNeedsRefresh(ctx.getFacesContext()))
                    {
                        FaceletViewDeclarationLanguageBase.resetDynamicComponentNeedsRefreshFlag(
                            ctx.getFacesContext());
                        FaceletCompositionContext mctx = FaceletCompositionContext.getCurrentInstance(ctx);
                        if (mctx.isUsingPSSOnThisView())
                        {
                            FaceletViewDeclarationLanguage.cleanTransientBuildOnRestore(faces);
                        }
                        else
                        {
                            FaceletViewDeclarationLanguageBase.activateDynamicComponentRefreshTransientBuild(faces);
                        }
                        //
                        // Mark top binding component to be dynamically refreshed. In that way, facelets algorithm
                        // will be able to decide if the component children requires to be refreshed dynamically
                        // or not.
                        c.getAttributes().put(
                            FaceletDynamicComponentRefreshTransientBuildEvent.
                                DYNAMIC_COMPONENT_BINDING_NEEDS_REFRESH, Boolean.TRUE);
                    }
                }
            }
        }
        else
        {
            // According to the, spec call the second alternative with null rendererType gives
            // the same result, but without the unnecessary call for FacesContext.getCurrentInstance().
            // Saves 1 call per component without rendererType (f:viewParam, h:column, f:selectItem, ...)
            // and it does not have any side effects (the spec javadoc mentions in a explicit way
            // that rendererType can be null!).
            c = app.createComponent(faces, this._componentType, this._rendererType);
        }
        return c;
    }

    /**
     * If the id TagAttribute was specified, get it's value, otherwise generate a unique id from our tagId.
     *
     * See TagAttribute#getValue(FaceletContext)
     * @param ctx
     *            FaceletContext to use
     * @return what should be a unique Id
     */
    protected String getId(FaceletContext ctx)
    {
        if (this._id != null)
        {
            return this._id.getValue(ctx);
        }
        return ctx.generateUniqueId(_delegate.getTagId());
    }

    /**
     * Builds the MetaRuleset used to wire tag attributes onto the component: standard
     * attribute auto-wiring, passthrough attributes, and the specialized rules for
     * ActionSource / ValueHolder / EditableValueHolder components.
     */
    @Override
    public MetaRuleset createMetaRuleset(Class type)
    {
        MetaRuleset m = new MetaRulesetImpl(_delegate.getTag(), type);
        // ignore standard component attributes
        m.ignore("binding").ignore("id");

        // add auto wiring for attributes
        m.addRule(ComponentRule.INSTANCE);

        // add special rule for passthrough attributes
        m.addRule(PassthroughRuleImpl.INSTANCE);

        // if it's an ActionSource
        if (ActionSource.class.isAssignableFrom(type))
        {
            m.addRule(ActionSourceRule.INSTANCE);
        }

        // if it's a ValueHolder
        if (ValueHolder.class.isAssignableFrom(type))
        {
            m.addRule(ValueHolderRule.INSTANCE);

            // if it's an EditableValueHolder
            if (EditableValueHolder.class.isAssignableFrom(type))
            {
                m.ignore("submittedValue");
                m.ignore("valid");
                m.addRule(EditableValueHolderRule.INSTANCE);
            }
        }

        // allow components to specify the "class" attribute
        m.alias("class", "styleClass");

        return m;
    }

    /**
     * Add the default Validators to the component.
     * Also adds all validators specified by enclosing <f:validateBean> tags
     * (e.g. the BeanValidator if it is not a default validator).
     *
     * @param ctx the current FaceletContext
     * @param mctx the FaceletCompositionContext holding the enclosing validator stack
     * @param context The FacesContext.
     * @param component The EditableValueHolder to which the validators should be added
     */
    private void addEnclosingAndDefaultValidators(FaceletContext ctx,
                                                  FaceletCompositionContext mctx,
                                                  FacesContext context,
                                                  EditableValueHolder component)
    {
        // add all enclosing validators, because they have precedence over default validators.
        Iterator<Map.Entry<String, EditableValueHolderAttachedObjectHandler>> enclosingValidatorIds =
            mctx.getEnclosingValidatorIdsAndHandlers();
        if (enclosingValidatorIds != null)
        {
            while (enclosingValidatorIds.hasNext())
            {
                Map.Entry<String, EditableValueHolderAttachedObjectHandler> entry = enclosingValidatorIds.next();
                addEnclosingValidator(context, component, entry.getKey(), entry.getValue());
            }
        }
        // add all defaultValidators
        Map<String, String> defaultValidators = context.getApplication().getDefaultValidatorInfo();
        if (defaultValidators != null && !defaultValidators.isEmpty())
        {
            for (Map.Entry<String, String> entry : defaultValidators.entrySet())
            {
                if (!mctx.containsEnclosingValidatorId(entry.getKey()))
                {
                    addDefaultValidator(ctx, mctx, context, component, entry.getKey(), entry.getValue());
                }
            }
        }
    }

    /**
     * Adds the default validator with the given id to the component, unless it is excluded,
     * disabled by an enclosing tag, or already registered on the component.
     */
    private void addDefaultValidator(FaceletContext ctx, FaceletCompositionContext mctx, FacesContext context,
                                     EditableValueHolder component, String validatorId, String validatorClassName)
    {
        Validator enclosingValidator = null;

        if (validatorClassName == null)
        {
            // we have no class name for validators of enclosing <f:validateBean> tags
            // --> we have to create it to get the class name
            // note that normally we can use this instance later anyway!
            enclosingValidator = context.getApplication().createValidator(validatorId);
            validatorClassName = enclosingValidator.getClass().getName();
        }

        // check if the validator is already registered for the given component
        // this happens if <f:validateBean /> is nested inside the component on the view
        Validator validator = null;
        for (Validator v : component.getValidators())
        {
            if (v.getClass().getName().equals(validatorClassName))
            {
                // found
                validator = v;
                break;
            }
        }

        if (validator == null)
        {
            if (shouldAddDefaultValidator(ctx, mctx, component, validatorId))
            {
                if (enclosingValidator != null)
                {
                    // we can use the instance from before
                    validator = enclosingValidator;
                }
                else
                {
                    // create it
                    validator = context.getApplication().createValidator(validatorId);
                }
                // add the validator to the component
                component.addValidator(validator);
            }
            else
            {
                // we should not add the validator
                return;
            }
        }

        // special things to configure for a BeanValidator
        if (validator instanceof BeanValidator beanValidator)
        {
            // check the validationGroups
            String validationGroups = beanValidator.getValidationGroups();
            if (validationGroups == null
                    || validationGroups.matches(BeanValidator.EMPTY_VALIDATION_GROUPS_PATTERN))
            {
                // no validationGroups available
                // --> get the validationGroups from the stack
                //String stackGroup = mctx.getFirstValidationGroupFromStack();
                //if (stackGroup != null)
                //{
                //    validationGroups = stackGroup;
                //}
                //else
                //{
                    // no validationGroups on the stack
                    // --> set the default validationGroup
                    validationGroups = jakarta.validation.groups.Default.class.getName();
                //}
                beanValidator.setValidationGroups(validationGroups);
            }
        }
    }

    /**
     * Determine if the validator with the given validatorId should be added.
     *
     * @param ctx The FaceletContext.
     * @param mctx the FaceletCompositionContext
     * @param component The EditableValueHolder to which the validator should be added.
     * @param validatorId The validatorId.
     * @return true if the Validator should be added, false otherwise.
     */
    @SuppressWarnings("unchecked")
    private boolean shouldAddDefaultValidator(FaceletContext ctx, FaceletCompositionContext mctx,
                                              EditableValueHolder component,
                                              String validatorId)
    {
        // check if the validatorId is on the exclusion list on the component
        List<String> exclusionList
                = (List<String>) ((UIComponent) component).getAttributes()
                        .get(ValidatorTagHandlerDelegate.VALIDATOR_ID_EXCLUSION_LIST_KEY);
        if (exclusionList != null)
        {
            for (String excludedId : exclusionList)
            {
                if (excludedId.equals(validatorId))
                {
                    return false;
                }
            }
        }

        // An enclosing tag with the same validator id that is disabled suppresses the default one.
        Iterator<Map.Entry<String, EditableValueHolderAttachedObjectHandler>> enclosingValidatorIds =
            mctx.getEnclosingValidatorIdsAndHandlers();
        if (enclosingValidatorIds != null)
        {
            while (enclosingValidatorIds.hasNext())
            {
                Map.Entry<String, EditableValueHolderAttachedObjectHandler> entry = enclosingValidatorIds.next();
                boolean validatorIdAvailable = entry.getKey() != null && !entry.getKey().isEmpty();
                if (validatorIdAvailable && entry.getKey().equals(validatorId))
                {
                    if (((FacesWrapper<ValidatorHandler>) entry.getValue()).getWrapped().isDisabled(ctx))
                    {
                        return false;
                    }
                }
            }
        }

        // Some extra rules are required for Bean Validation.
        if (validatorId.equals(BeanValidator.VALIDATOR_ID))
        {
            if (!ExternalSpecifications.isBeanValidationAvailable())
            {
                // the BeanValidator was added as a default-validator, but
                // bean validation is not available on the classpath.
                // --> log a warning about this scenario.
                log.log(Level.WARNING, "Bean validation is not available on the "
                        + "classpath, thus the BeanValidator will not be added for "
                        + "the component " + component);
                return false;
            }
        }

        // By default, all default validators should be added
        return true;
    }

    /**
     * Adds a validator declared by an enclosing <f:validateBean> tag, either via its
     * attached-object handler (preferred) or by creating the validator instance directly.
     */
    private void addEnclosingValidator(FacesContext context,
                                       EditableValueHolder component, String validatorId,
                                       EditableValueHolderAttachedObjectHandler attachedObjectHandler)
    {
        if (shouldAddEnclosingValidator(component, validatorId))
        {
            if (attachedObjectHandler != null)
            {
                attachedObjectHandler.applyAttachedObject(context, (UIComponent) component);
            }
            else
            {
                Validator validator = null;
                // create it
                validator = context.getApplication().createValidator(validatorId);

                // special things to configure for a BeanValidator
                if (validator instanceof BeanValidator beanValidator)
                {
                    // check the validationGroups
                    String validationGroups = beanValidator.getValidationGroups();
                    if (validationGroups == null
                            || validationGroups.matches(BeanValidator.EMPTY_VALIDATION_GROUPS_PATTERN))
                    {
                        // no validationGroups available
                        // --> get the validationGroups from the stack
                        //String stackGroup = mctx.getFirstValidationGroupFromStack();
                        //if (stackGroup != null)
                        //{
                        //    validationGroups = stackGroup;
                        //}
                        //else
                        //{
                            // no validationGroups on the stack
                            // --> set the default validationGroup
                            validationGroups = jakarta.validation.groups.Default.class.getName();
                        //}
                        beanValidator.setValidationGroups(validationGroups);
                    }
                }

                // add the validator to the component
                component.addValidator(validator);
            }
        }
    }

    /**
     * Determine if the validator with the given validatorId should be added.
     *
     * The difference here with shouldAddDefaultValidator is the inner one has
     * precedence over the outer one, so a disable="true" over the same outer
     * validator, the inner one should ignore this condition.
     *
     * @param component the EditableValueHolder the validator would be added to
     * @param validatorId the id of the validator to check
     * @return true if the Validator should be added, false otherwise
     */
    @SuppressWarnings("unchecked")
    private boolean shouldAddEnclosingValidator(EditableValueHolder component,
                                                String validatorId)
    {
        // check if the validatorId is on the exclusion list on the component
        List<String> exclusionList = (List<String>) ((UIComponent) component)
                .getAttributes()
                .get(ValidatorTagHandlerDelegate.VALIDATOR_ID_EXCLUSION_LIST_KEY);
        if (exclusionList != null)
        {
            for (String excludedId : exclusionList)
            {
                if (excludedId.equals(validatorId))
                {
                    return false;
                }
            }
        }

        // Some extra rules are required for Bean Validation.
        if (validatorId.equals(BeanValidator.VALIDATOR_ID)
                && !ExternalSpecifications.isBeanValidationAvailable())
        {
            // the BeanValidator was added as a default-validator, but
            // bean validation is not available on the classpath.
            // --> log a warning about this scenario.
            log.log(Level.WARNING, "Bean validation is not available on the "
                    + "classpath, thus the BeanValidator will not be added for "
                    + "the component " + component);
            return false;
        }

        // By default, all default validators should be added
        return true;
    }

    /**
     * VisitCallback that publishes a FaceletDynamicComponentRefreshTransientBuildEvent
     * for every visited component; used to propagate the dynamic-refresh notification
     * through the subtree of a re-found "binding" component.
     */
    private static class PublishFaceletDynamicComponentRefreshTransientBuildCallback implements VisitCallback
    {
        public static final PublishFaceletDynamicComponentRefreshTransientBuildCallback INSTANCE =
            new PublishFaceletDynamicComponentRefreshTransientBuildCallback();

        @Override
        public VisitResult visit(VisitContext context, UIComponent target)
        {
            context.getFacesContext().getApplication().publishEvent(
                    context.getFacesContext(), FaceletDynamicComponentRefreshTransientBuildEvent.class,
                    target.getClass(), target);
            return VisitResult.ACCEPT;
        }
    }
}
apache/ranger
38,079
security-admin/src/test/java/org/apache/ranger/patch/TestPatchForKafkaServiceDefUpdate_J10033.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ranger.patch; import org.apache.ranger.biz.ServiceDBStore; import org.apache.ranger.biz.XUserMgr; import org.apache.ranger.common.GUIDUtil; import org.apache.ranger.common.JSONUtil; import org.apache.ranger.common.RangerValidatorFactory; import org.apache.ranger.common.StringUtil; import org.apache.ranger.db.RangerDaoManager; import org.apache.ranger.db.XXAccessTypeDefDao; import org.apache.ranger.db.XXGroupDao; import org.apache.ranger.db.XXPolicyDao; import org.apache.ranger.db.XXPolicyItemAccessDao; import org.apache.ranger.db.XXPolicyItemDao; import org.apache.ranger.db.XXPolicyItemGroupPermDao; import org.apache.ranger.db.XXPolicyItemUserPermDao; import org.apache.ranger.db.XXPolicyResourceDao; import org.apache.ranger.db.XXPolicyResourceMapDao; import org.apache.ranger.db.XXPortalUserDao; import org.apache.ranger.db.XXResourceDefDao; import org.apache.ranger.db.XXServiceDao; import org.apache.ranger.db.XXServiceDefDao; import org.apache.ranger.db.XXUserDao; import org.apache.ranger.entity.XXAccessTypeDef; import org.apache.ranger.entity.XXGroup; import org.apache.ranger.entity.XXPolicy; import org.apache.ranger.entity.XXPolicyItem; import 
org.apache.ranger.entity.XXPolicyItemAccess; import org.apache.ranger.entity.XXPolicyItemGroupPerm; import org.apache.ranger.entity.XXPolicyItemUserPerm; import org.apache.ranger.entity.XXPolicyResource; import org.apache.ranger.entity.XXPolicyResourceMap; import org.apache.ranger.entity.XXPortalUser; import org.apache.ranger.entity.XXResourceDef; import org.apache.ranger.entity.XXService; import org.apache.ranger.entity.XXServiceDef; import org.apache.ranger.entity.XXUser; import org.apache.ranger.plugin.model.RangerPolicy; import org.apache.ranger.plugin.model.RangerServiceDef; import org.apache.ranger.plugin.model.validation.RangerServiceDefValidator; import org.apache.ranger.plugin.store.EmbeddedServiceDefsUtil; import org.apache.ranger.service.RangerPolicyService; import org.apache.ranger.util.CLIUtil; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.Permission; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; /** * @generated by Cursor * @description <Unit Test for PatchForKafkaServiceDefUpdate_J10033 class> */ @ExtendWith(MockitoExtension.class) 
@TestMethodOrder(MethodOrderer.MethodName.class)
public class TestPatchForKafkaServiceDefUpdate_J10033 {
    // NOTE(review): these tests drive private methods of the patch class via reflection and
    // inject collaborators by field name; they are therefore tightly coupled to the patch
    // class's internal field/method names (e.g. "daoMgr", "createDefaultPolicyForNewResources").

    /**
     * Injects {@code value} into the field named {@code fieldName} on {@code target} via
     * reflection. Silently does nothing when the field does not exist (so tests can set
     * fields that may or may not be present on the class under test); any other reflection
     * failure is rethrown as a RuntimeException.
     */
    private static void setIfPresent(Object target, String fieldName, Object value) {
        try {
            Field f = target.getClass().getDeclaredField(fieldName);
            f.setAccessible(true);
            f.set(target, value);
        } catch (NoSuchFieldException ignored) {
            // field absent on this version of the class — intentionally skipped
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** execLoad() and printStats() complete without throwing when all collaborators are mocked. */
    @Test
    public void testExecLoadAndPrintStats() {
        try (MockedStatic<EmbeddedServiceDefsUtil> utilMock = Mockito.mockStatic(EmbeddedServiceDefsUtil.class)) {
            EmbeddedServiceDefsUtil util = Mockito.mock(EmbeddedServiceDefsUtil.class);
            utilMock.when(EmbeddedServiceDefsUtil::instance).thenReturn(util);
            PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
            setIfPresent(patch, "daoMgr", Mockito.mock(RangerDaoManager.class));
            setIfPresent(patch, "svcDBStore", Mockito.mock(ServiceDBStore.class));
            setIfPresent(patch, "jsonUtil", new JSONUtil());
            setIfPresent(patch, "policyService", Mockito.mock(RangerPolicyService.class));
            setIfPresent(patch, "stringUtil", new StringUtil());
            setIfPresent(patch, "validatorFactory", Mockito.mock(RangerValidatorFactory.class));
            setIfPresent(patch, "svcStore", Mockito.mock(ServiceDBStore.class));
            patch.execLoad();
            patch.printStats();
        }
    }

    /** init() is expected to be a no-op; the test only verifies it does not throw. */
    @Test
    public void testInit_DoesNothing() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        patch.init();
    }

    /**
     * getAccessTypes() contains the expected Kafka access types and getPolicyItemAccesses()
     * produces one allowed access entry per access type.
     */
    @Test
    public void testGetAccessTypes_andPolicyItemAccesses() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        Method getTypes = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("getAccessTypes");
        getTypes.setAccessible(true);
        @SuppressWarnings("unchecked")
        List<String> types = (List<String>) getTypes.invoke(patch);
        assertTrue(types.contains("consume"));
        assertTrue(types.contains("delete"));
        Method getAccesses = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("getPolicyItemAccesses");
        getAccesses.setAccessible(true);
        @SuppressWarnings("unchecked")
        ArrayList<RangerPolicy.RangerPolicyItemAccess> accesses =
                (ArrayList<RangerPolicy.RangerPolicyItemAccess>) getAccesses.invoke(patch);
        // one access per type, all flagged as allowed
        assertEquals(types.size(), accesses.size());
        assertTrue(accesses.stream().allMatch(a -> a.getIsAllowed() != null && a.getIsAllowed()));
    }

    /** getRangerPolicy(...) builds a policy with the consumergroup resource and non-empty policy items. */
    @Test
    public void testGetRangerPolicy_buildsExpected() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        Method getPolicy = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("getRangerPolicy",
                String.class, XXPortalUser.class, XXService.class);
        getPolicy.setAccessible(true);
        XXPortalUser user = new XXPortalUser();
        user.setLoginId("admin");
        XXService svc = new XXService();
        svc.setName("kafka_svc");
        RangerPolicy policy = (RangerPolicy) getPolicy.invoke(patch, "all - consumergroup", user, svc);
        assertNotNull(policy);
        assertTrue(policy.getResources().containsKey(PatchForKafkaServiceDefUpdate_J10033.CONSUMERGROUP_RESOURCE_NAME));
        assertTrue(policy.getPolicyItems() != null && !policy.getPolicyItems().isEmpty());
    }

    /** createDefaultPolicyForNewResources() returns early (no exception) when the service-def is absent. */
    @Test
    public void testCreateDefaultPolicyForNewResources_serviceDefNull_returns() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
        XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
        Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
        XXPortalUserDao portalDao = Mockito.mock(XXPortalUserDao.class);
        Mockito.when(daoMgr.getXXPortalUser()).thenReturn(portalDao);
        XXPortalUser admin = new XXPortalUser();
        admin.setId(1L);
        Mockito.when(portalDao.findByLoginId("admin")).thenReturn(admin);
        Mockito.when(sdefDao.findByName(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KAFKA_NAME)).thenReturn(null);
        setIfPresent(patch, "daoMgr", daoMgr);
        Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("createDefaultPolicyForNewResources");
        m.setAccessible(true);
        // no exception when serviceDef is null
        m.invoke(patch);
    }

    /**
     * createDefaultPolicyForNewResources() throws when the "public" group cannot be found.
     * The stack of stubs below models the happy path up to the group lookup, which returns null.
     */
    @Test
    public void testCreateDefaultPolicyForNewResources_throwsWhenGroupMissing() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
        setIfPresent(patch, "daoMgr", daoMgr);
        setIfPresent(patch, "guidUtil", new GUIDUtil());
        // admin user lookup
        XXPortalUserDao portalDao = Mockito.mock(XXPortalUserDao.class);
        Mockito.when(daoMgr.getXXPortalUser()).thenReturn(portalDao);
        XXPortalUser admin = new XXPortalUser();
        admin.setId(1L);
        admin.setLoginId("admin");
        Mockito.when(portalDao.findByLoginId("admin")).thenReturn(admin);
        // service-def exists
        XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
        Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
        XXServiceDef sdef = new XXServiceDef();
        sdef.setId(5L);
        Mockito.when(sdefDao.findByName(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KAFKA_NAME)).thenReturn(sdef);
        // one service registered against the service-def
        XXServiceDao svcDao = Mockito.mock(XXServiceDao.class);
        Mockito.when(daoMgr.getXXService()).thenReturn(svcDao);
        XXService xs = new XXService();
        xs.setId(7L);
        xs.setName("k1");
        Mockito.when(svcDao.findByServiceDefId(5L)).thenReturn(new ArrayList<>(Collections.singletonList(xs)));
        // policy creation returns a persisted row
        XXPolicyDao polDao = Mockito.mock(XXPolicyDao.class);
        Mockito.when(daoMgr.getXXPolicy()).thenReturn(polDao);
        XXPolicy created = new XXPolicy();
        created.setId(100L);
        created.setService(7L);
        Mockito.when(polDao.create(Mockito.any())).thenReturn(created);
        // policy-item creation returns a persisted row
        XXPolicyItemDao piDao = Mockito.mock(XXPolicyItemDao.class);
        Mockito.when(daoMgr.getXXPolicyItem()).thenReturn(piDao);
        XXPolicyItem createdItem = new XXPolicyItem();
        createdItem.setId(200L);
        Mockito.when(piDao.create(Mockito.any())).thenReturn(createdItem);
        // access-type def lookup resolves for any access name
        XXAccessTypeDefDao atDao = Mockito.mock(XXAccessTypeDefDao.class);
        Mockito.when(daoMgr.getXXAccessTypeDef()).thenReturn(atDao);
        XXAccessTypeDef at = new XXAccessTypeDef();
        at.setId(300L);
        Mockito.when(atDao.findByNameAndServiceId(Mockito.anyString(), Mockito.eq(7L))).thenReturn(at);
        // users exist
        XXUserDao userDao = Mockito.mock(XXUserDao.class);
        Mockito.when(daoMgr.getXXUser()).thenReturn(userDao);
        XXUser kafkaUser = new XXUser();
        kafkaUser.setId(10L);
        Mockito.when(userDao.findByUserName("kafka")).thenReturn(kafkaUser);
        XXUser lookupUser = new XXUser();
        lookupUser.setId(11L);
        Mockito.when(userDao.findByUserName("rangerlookup")).thenReturn(lookupUser);
        // group missing — this is the failure under test
        XXGroupDao groupDao = Mockito.mock(XXGroupDao.class);
        Mockito.when(daoMgr.getXXGroup()).thenReturn(groupDao);
        Mockito.when(groupDao.findByGroupName("public")).thenReturn(null);
        // expect RuntimeException for missing group (unwrap the reflective InvocationTargetException)
        Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("createDefaultPolicyForNewResources");
        m.setAccessible(true);
        assertThrows(RuntimeException.class, () -> {
            try {
                m.invoke(patch);
            } catch (InvocationTargetException ite) {
                if (ite.getTargetException() instanceof RuntimeException) {
                    throw (RuntimeException) ite.getTargetException();
                }
                throw new RuntimeException(ite.getTargetException());
            }
        });
    }

    /** Full happy path: every DAO lookup/create succeeds and the method completes without throwing. */
    @Test
    public void testCreateDefaultPolicyForNewResources_happyPath() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
        setIfPresent(patch, "daoMgr", daoMgr);
        setIfPresent(patch, "guidUtil", new GUIDUtil());
        // admin user lookup
        XXPortalUserDao portalDao = Mockito.mock(XXPortalUserDao.class);
        Mockito.when(daoMgr.getXXPortalUser()).thenReturn(portalDao);
        XXPortalUser admin = new XXPortalUser();
        admin.setId(1L);
        admin.setLoginId("admin");
        Mockito.when(portalDao.findByLoginId("admin")).thenReturn(admin);
        // service-def exists
        XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
        Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
        XXServiceDef sdef = new XXServiceDef();
        sdef.setId(5L);
        Mockito.when(sdefDao.findByName(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KAFKA_NAME)).thenReturn(sdef);
        // one service
        XXServiceDao svcDao = Mockito.mock(XXServiceDao.class);
        Mockito.when(daoMgr.getXXService()).thenReturn(svcDao);
        XXService xs = new XXService();
        xs.setId(7L);
        xs.setName("k1");
        Mockito.when(svcDao.findByServiceDefId(5L)).thenReturn(new ArrayList<>(Collections.singletonList(xs)));
        // policy creation returns a persisted row
        XXPolicyDao polDao = Mockito.mock(XXPolicyDao.class);
        Mockito.when(daoMgr.getXXPolicy()).thenReturn(polDao);
        XXPolicy created = new XXPolicy();
        created.setId(100L);
        created.setService(7L);
        Mockito.when(polDao.create(Mockito.any())).thenReturn(created);
        // policy-item creation returns a persisted row
        XXPolicyItemDao piDao = Mockito.mock(XXPolicyItemDao.class);
        Mockito.when(daoMgr.getXXPolicyItem()).thenReturn(piDao);
        XXPolicyItem createdItem = new XXPolicyItem();
        createdItem.setId(200L);
        Mockito.when(piDao.create(Mockito.any())).thenReturn(createdItem);
        // access-type def lookup resolves for all access names
        XXAccessTypeDefDao atDao = Mockito.mock(XXAccessTypeDefDao.class);
        Mockito.when(daoMgr.getXXAccessTypeDef()).thenReturn(atDao);
        XXAccessTypeDef at = new XXAccessTypeDef();
        at.setId(300L);
        Mockito.when(atDao.findByNameAndServiceId(Mockito.anyString(), Mockito.eq(7L))).thenReturn(at);
        // users exist
        XXUserDao userDao = Mockito.mock(XXUserDao.class);
        Mockito.when(daoMgr.getXXUser()).thenReturn(userDao);
        XXUser kafkaUser = new XXUser();
        kafkaUser.setId(10L);
        Mockito.when(userDao.findByUserName("kafka")).thenReturn(kafkaUser);
        XXUser lookupUser = new XXUser();
        lookupUser.setId(11L);
        Mockito.when(userDao.findByUserName("rangerlookup")).thenReturn(lookupUser);
        // group exists
        XXGroupDao groupDao = Mockito.mock(XXGroupDao.class);
        Mockito.when(daoMgr.getXXGroup()).thenReturn(groupDao);
        XXGroup grp = new XXGroup();
        grp.setId(55L);
        grp.setName("public");
        Mockito.when(groupDao.findByGroupName("public")).thenReturn(grp);
        // DAOs for user/group perms, accesses, and resource wiring
        XXPolicyItemUserPermDao piUserPermDao = Mockito.mock(XXPolicyItemUserPermDao.class);
        Mockito.when(daoMgr.getXXPolicyItemUserPerm()).thenReturn(piUserPermDao);
        Mockito.when(piUserPermDao.create(Mockito.any())).thenReturn(new XXPolicyItemUserPerm());
        XXPolicyItemGroupPermDao piGroupPermDao = Mockito.mock(XXPolicyItemGroupPermDao.class);
        Mockito.when(daoMgr.getXXPolicyItemGroupPerm()).thenReturn(piGroupPermDao);
        Mockito.when(piGroupPermDao.create(Mockito.any())).thenReturn(new XXPolicyItemGroupPerm());
        XXPolicyItemAccessDao piAccessDao = Mockito.mock(XXPolicyItemAccessDao.class);
        Mockito.when(daoMgr.getXXPolicyItemAccess()).thenReturn(piAccessDao);
        Mockito.when(piAccessDao.create(Mockito.any())).thenReturn(new XXPolicyItemAccess());
        XXResourceDefDao resDefDao = Mockito.mock(XXResourceDefDao.class);
        Mockito.when(daoMgr.getXXResourceDef()).thenReturn(resDefDao);
        XXResourceDef resDef = new XXResourceDef();
        resDef.setId(400L);
        Mockito.when(resDefDao.findByNameAndPolicyId(
                Mockito.eq(PatchForKafkaServiceDefUpdate_J10033.CONSUMERGROUP_RESOURCE_NAME), Mockito.eq(100L)))
                .thenReturn(resDef);
        XXPolicyResourceDao polResDao = Mockito.mock(XXPolicyResourceDao.class);
        Mockito.when(daoMgr.getXXPolicyResource()).thenReturn(polResDao);
        XXPolicyResource createdRes = new XXPolicyResource();
        createdRes.setId(500L);
        Mockito.when(polResDao.create(Mockito.any())).thenReturn(createdRes);
        XXPolicyResourceMapDao polResMapDao = Mockito.mock(XXPolicyResourceMapDao.class);
        Mockito.when(daoMgr.getXXPolicyResourceMap()).thenReturn(polResMapDao);
        Mockito.when(polResMapDao.create(Mockito.any())).thenReturn(new XXPolicyResourceMap());
        Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("createDefaultPolicyForNewResources");
        m.setAccessible(true);
        m.invoke(patch);
    }

    /**
     * With a null service-def the method exits before any policy-existence check; the test name
     * reflects the intended scenario, but the early-return path is what is actually exercised
     * (see inline comment) to keep Mockito strict-stub checks satisfied.
     */
    @Test
    public void testCreateDefaultPolicyForNewResources_policyAlreadyExists_skipsCreate() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
        ServiceDBStore svcDBStore = Mockito.mock(ServiceDBStore.class);
        setIfPresent(patch, "daoMgr", daoMgr);
        setIfPresent(patch, "svcDBStore", svcDBStore);
        setIfPresent(patch, "guidUtil", new GUIDUtil());
        XXPortalUserDao portalDao = Mockito.mock(XXPortalUserDao.class);
        Mockito.when(daoMgr.getXXPortalUser()).thenReturn(portalDao);
        XXPortalUser admin = new XXPortalUser();
        admin.setId(1L);
        Mockito.when(portalDao.findByLoginId("admin")).thenReturn(admin);
        XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
        Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
        // Return null service-def to exercise early return and avoid unnecessary stubs
        Mockito.when(sdefDao.findByName(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KAFKA_NAME)).thenReturn(null);
        Method create = PatchForKafkaServiceDefUpdate_J10033.class
                .getDeclaredMethod("createDefaultPolicyForNewResources");
        create.setAccessible(true);
        create.invoke(patch);
    }

    /** updateKafkaServiceDef() tolerates a null DB service-def (getServiceDefByName returns null). */
    @Test
    public void testUpdateKafkaServiceDef_dbServiceDefNull_noThrow() throws Exception {
        try (MockedStatic<EmbeddedServiceDefsUtil> utilMock = Mockito.mockStatic(EmbeddedServiceDefsUtil.class)) {
            EmbeddedServiceDefsUtil util = Mockito.mock(EmbeddedServiceDefsUtil.class);
            utilMock.when(EmbeddedServiceDefsUtil::instance).thenReturn(util);
            Mockito.when(util.getEmbeddedServiceDef(Mockito.anyString())).thenReturn(new RangerServiceDef());
            PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
            RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
            XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
            Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
            XXServiceDef pre = new XXServiceDef();
            pre.setDefOptions("{}");
            Mockito.when(sdefDao.findByName(Mockito.anyString())).thenReturn(pre);
            ServiceDBStore svcDBStore = Mockito.mock(ServiceDBStore.class);
            Mockito.when(svcDBStore.getServiceDefByName(Mockito.anyString())).thenReturn(null);
            setIfPresent(patch, "daoMgr", daoMgr);
            setIfPresent(patch, "svcDBStore", svcDBStore);
            setIfPresent(patch, "validatorFactory", Mockito.mock(RangerValidatorFactory.class));
            setIfPresent(patch, "svcStore", svcDBStore);
            Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("updateKafkaServiceDef");
            m.setAccessible(true);
            m.invoke(patch);
        }
    }

    /** updateKafkaServiceDef() tolerates a null embedded service-def. */
    @Test
    public void testUpdateKafkaServiceDef_embeddedNull_noThrow() throws Exception {
        try (MockedStatic<EmbeddedServiceDefsUtil> utilMock = Mockito.mockStatic(EmbeddedServiceDefsUtil.class)) {
            EmbeddedServiceDefsUtil util = Mockito.mock(EmbeddedServiceDefsUtil.class);
            utilMock.when(EmbeddedServiceDefsUtil::instance).thenReturn(util);
            Mockito.when(util.getEmbeddedServiceDef(Mockito.anyString())).thenReturn(null);
            PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
            Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("updateKafkaServiceDef");
            m.setAccessible(true);
            m.invoke(patch);
        }
    }

    /**
     * jsonStringToMap(): the semicolon-delimited "k=v" fallback parser keeps entries with a
     * key (value may be null for a bare "e="), and an empty input yields null.
     */
    @Test
    public void testJsonStringToMap_fallbackAndEmpty() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        setIfPresent(patch, "jsonUtil", new JSONUtil());
        Method jsonToMap = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("jsonStringToMap", String.class);
        jsonToMap.setAccessible(true);
        // fallback parsing path
        String cfg = "a=b;;c=d;=;e=";
        @SuppressWarnings("unchecked")
        Map<String, String> map = (Map<String, String>) jsonToMap.invoke(patch, cfg);
        assertEquals(3, map.size());
        assertEquals("b", map.get("a"));
        assertEquals("d", map.get("c"));
        assertNull(map.get("e"));
        // empty string => null
        @SuppressWarnings("unchecked")
        Map<String, String> empty = (Map<String, String>) jsonToMap.invoke(patch, "");
        assertNull(empty);
    }

    /** jsonStringToMap(): well-formed JSON input parses to the expected entries. */
    @Test
    public void testJsonStringToMap_jsonSuccess() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        setIfPresent(patch, "jsonUtil", new JSONUtil());
        Method jsonToMap = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("jsonStringToMap", String.class);
        jsonToMap.setAccessible(true);
        String json = "{\"a\":\"b\", \"c\":\"d\"}";
        @SuppressWarnings("unchecked")
        Map<String, String> map = (Map<String, String>) jsonToMap.invoke(patch, json);
        assertEquals("b", map.get("a"));
        assertEquals("d", map.get("c"));
    }

    /** mapToJsonString(): serializes a map on success and returns null when the JSON util throws. */
    @Test
    public void testMapToJsonString_successAndException() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        setIfPresent(patch, "jsonUtil", new JSONUtil());
        Method mapToJson = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("mapToJsonString", Map.class);
        mapToJson.setAccessible(true);
        Map<String, String> input = new HashMap<>();
        input.put("a", "b");
        String json = (String) mapToJson.invoke(patch, input);
        assertNotNull(json);
        // a throwing JSONUtil is swallowed and surfaces as a null return
        JSONUtil failing = Mockito.mock(JSONUtil.class);
        Mockito.when(failing.readMapToString(Mockito.anyMap())).thenThrow(new RuntimeException("boom"));
        setIfPresent(patch, "jsonUtil", failing);
        String jsonNull = (String) mapToJson.invoke(patch, input);
        assertNull(jsonNull);
    }

    /** checkNewKafkaresourcePresent(): true when the consumergroup resource is listed, false otherwise. */
    @Test
    public void testCheckNewKafkaresourcePresent_trueFalse() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        Method chk = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("checkNewKafkaresourcePresent", List.class);
        chk.setAccessible(true);
        List<RangerServiceDef.RangerResourceDef> resources = new ArrayList<>();
        RangerServiceDef.RangerResourceDef def1 = new RangerServiceDef.RangerResourceDef();
        def1.setName(PatchForKafkaServiceDefUpdate_J10033.CONSUMERGROUP_RESOURCE_NAME);
        resources.add(def1);
        boolean present = (boolean) chk.invoke(patch, resources);
        assertTrue(present);
        List<RangerServiceDef.RangerResourceDef> resources2 = new ArrayList<>();
        RangerServiceDef.RangerResourceDef def2 = new RangerServiceDef.RangerResourceDef();
        def2.setName("topic");
        resources2.add(def2);
        boolean absent = (boolean) chk.invoke(patch, resources2);
        assertTrue(!absent);
    }

    /**
     * updateKafkaServiceDef(): when the post-update service-def already carries the
     * enableDenyAndExceptionsInPolicies option, the method completes without throwing.
     * Note the consecutive stub {@code thenReturn(pre, post)}: the first findByName call sees
     * the pre-update row, the second sees the post-update row.
     */
    @Test
    public void testUpdateKafkaServiceDef_preserveOptionValueWhenPresent() throws Exception {
        try (MockedStatic<EmbeddedServiceDefsUtil> utilMock = Mockito.mockStatic(EmbeddedServiceDefsUtil.class)) {
            EmbeddedServiceDefsUtil util = Mockito.mock(EmbeddedServiceDefsUtil.class);
            utilMock.when(EmbeddedServiceDefsUtil::instance).thenReturn(util);
            // Embedded def present
            RangerServiceDef embedded = new RangerServiceDef();
            embedded.setResources(new ArrayList<>());
            embedded.setAccessTypes(new ArrayList<>());
            Mockito.when(util.getEmbeddedServiceDef(Mockito.anyString())).thenReturn(embedded);
            PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
            RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
            ServiceDBStore svcDBStore = Mockito.mock(ServiceDBStore.class);
            setIfPresent(patch, "daoMgr", daoMgr);
            setIfPresent(patch, "svcDBStore", svcDBStore);
            setIfPresent(patch, "jsonUtil", new JSONUtil());
            setIfPresent(patch, "validatorFactory", Mockito.mock(RangerValidatorFactory.class));
            setIfPresent(patch, "svcStore", Mockito.mock(ServiceDBStore.class));
            XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
            Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
            // Pre-update without the option triggers update path
            XXServiceDef pre = new XXServiceDef();
            pre.setDefOptions("{}");
            Mockito.when(sdefDao.findByName(Mockito.anyString())).thenReturn(pre);
            // DB service-def returned and updated
            RangerServiceDef db = new RangerServiceDef();
            db.setResources(new ArrayList<>());
            db.setAccessTypes(new ArrayList<>());
            Mockito.when(svcDBStore.getServiceDefByName(Mockito.anyString())).thenReturn(db);
            RangerValidatorFactory vf = Mockito.mock(RangerValidatorFactory.class);
            setIfPresent(patch, "validatorFactory", vf);
            Mockito.when(vf.getServiceDefValidator(Mockito.any()))
                    .thenReturn(Mockito.mock(RangerServiceDefValidator.class));
            ServiceDBStore svcStore = Mockito.mock(ServiceDBStore.class);
            setIfPresent(patch, "svcStore", svcStore);
            Mockito.when(svcStore.updateServiceDef(Mockito.any())).thenReturn(new RangerServiceDef());
            // Post-update serviceDef with option
            XXServiceDef post = new XXServiceDef();
            post.setDefOptions("{\"enableDenyAndExceptionsInPolicies\":\"false\"}");
            Mockito.when(sdefDao.findByName(Mockito.anyString())).thenReturn(pre, post);
            Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("updateKafkaServiceDef");
            m.setAccessible(true);
            m.invoke(patch);
        }
    }

    /**
     * createDefaultPolicyForNewResources(): missing "kafka" user is created via xUserMgr
     * (consecutive stub null-then-user), then a missing resource-def triggers a RuntimeException.
     */
    @Test
    public void testCreateDefaultPolicyForNewResources_userCreatedAndResourceDefMissingThrows() throws Exception {
        PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
        RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
        setIfPresent(patch, "daoMgr", daoMgr);
        setIfPresent(patch, "guidUtil", new GUIDUtil());
        XXPortalUserDao portalDao = Mockito.mock(XXPortalUserDao.class);
        Mockito.when(daoMgr.getXXPortalUser()).thenReturn(portalDao);
        XXPortalUser admin = new XXPortalUser();
        admin.setId(1L);
        admin.setLoginId("admin");
        Mockito.when(portalDao.findByLoginId("admin")).thenReturn(admin);
        XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
        Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
        XXServiceDef sdef = new XXServiceDef();
        sdef.setId(5L);
        Mockito.when(sdefDao.findByName(EmbeddedServiceDefsUtil.EMBEDDED_SERVICEDEF_KAFKA_NAME)).thenReturn(sdef);
        XXServiceDao svcDao = Mockito.mock(XXServiceDao.class);
        Mockito.when(daoMgr.getXXService()).thenReturn(svcDao);
        XXService xs = new XXService();
        xs.setId(7L);
        xs.setName("k1");
        Mockito.when(svcDao.findByServiceDefId(5L)).thenReturn(new ArrayList<>(Collections.singletonList(xs)));
        XXPolicyDao polDao = Mockito.mock(XXPolicyDao.class);
        Mockito.when(daoMgr.getXXPolicy()).thenReturn(polDao);
        XXPolicy created = new XXPolicy();
        created.setId(100L);
        created.setService(7L);
        Mockito.when(polDao.create(Mockito.any())).thenReturn(created);
        XXPolicyItemDao piDao = Mockito.mock(XXPolicyItemDao.class);
        Mockito.when(daoMgr.getXXPolicyItem()).thenReturn(piDao);
        XXPolicyItem createdItem = new XXPolicyItem();
        createdItem.setId(200L);
        Mockito.when(piDao.create(Mockito.any())).thenReturn(createdItem);
        XXAccessTypeDefDao atDao = Mockito.mock(XXAccessTypeDefDao.class);
        Mockito.when(daoMgr.getXXAccessTypeDef()).thenReturn(atDao);
        XXAccessTypeDef at = new XXAccessTypeDef();
        at.setId(300L);
        Mockito.when(atDao.findByNameAndServiceId(Mockito.anyString(), Mockito.eq(7L))).thenReturn(at);
        XXUserDao userDao = Mockito.mock(XXUserDao.class);
        Mockito.when(daoMgr.getXXUser()).thenReturn(userDao);
        // Simulate first user missing then created by xUserMgr
        Mockito.when(userDao.findByUserName("kafka")).thenReturn(null, new XXUser());
        Mockito.when(userDao.findByUserName("rangerlookup")).thenReturn(new XXUser());
        XXPolicyItemUserPermDao piUserPermDao = Mockito.mock(XXPolicyItemUserPermDao.class);
        Mockito.when(daoMgr.getXXPolicyItemUserPerm()).thenReturn(piUserPermDao);
        Mockito.when(piUserPermDao.create(Mockito.any())).thenReturn(new XXPolicyItemUserPerm());
        // Access and Group perm DAOs used before resource-def lookup
        XXPolicyItemAccessDao piAccessDao = Mockito.mock(XXPolicyItemAccessDao.class);
        Mockito.when(daoMgr.getXXPolicyItemAccess()).thenReturn(piAccessDao);
        Mockito.when(piAccessDao.create(Mockito.any())).thenReturn(new XXPolicyItemAccess());
        XXPolicyItemGroupPermDao piGroupPermDao = Mockito.mock(XXPolicyItemGroupPermDao.class);
        Mockito.when(daoMgr.getXXPolicyItemGroupPerm()).thenReturn(piGroupPermDao);
        Mockito.when(piGroupPermDao.create(Mockito.any())).thenReturn(new XXPolicyItemGroupPerm());
        // Group must exist to pass group check and reach resource-def lookup
        XXGroupDao grpDao = Mockito.mock(XXGroupDao.class);
        Mockito.when(daoMgr.getXXGroup()).thenReturn(grpDao);
        XXGroup grp = new XXGroup();
        grp.setId(77L);
        grp.setName("public");
        Mockito.when(grpDao.findByGroupName("public")).thenReturn(grp);
        // Policies-by-signature should be empty to proceed with creation
        ServiceDBStore svcDBStore = Mockito.mock(ServiceDBStore.class);
        setIfPresent(patch, "svcDBStore", svcDBStore);
        Mockito.when(svcDBStore.getPoliciesByResourceSignature(Mockito.anyString(), Mockito.anyString(),
                Mockito.anyBoolean())).thenReturn(new ArrayList<>());
        XXResourceDefDao resDefDao = Mockito.mock(XXResourceDefDao.class);
        Mockito.when(daoMgr.getXXResourceDef()).thenReturn(resDefDao);
        // Simulate missing resource-def to throw
        Mockito.when(resDefDao.findByNameAndPolicyId(Mockito.anyString(), Mockito.eq(100L))).thenReturn(null);
        // Tx manager + xUserMgr
        PlatformTransactionManager tx = Mockito.mock(PlatformTransactionManager.class);
        TransactionStatus status = Mockito.mock(TransactionStatus.class);
        Mockito.when(tx.getTransaction(Mockito.any())).thenReturn(status);
        setIfPresent(patch, "txManager", tx);
        setIfPresent(patch, "xUserMgr", Mockito.mock(XUserMgr.class));
        Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("createDefaultPolicyForNewResources");
        m.setAccessible(true);
        assertThrows(RuntimeException.class, () -> {
            try {
                m.invoke(patch);
            } catch (InvocationTargetException ite) {
                if (ite.getTargetException() instanceof RuntimeException) {
                    throw (RuntimeException) ite.getTargetException();
                }
                throw new RuntimeException(ite.getTargetException());
            }
        });
    }

    /** updateKafkaServiceDef() tolerates updateServiceDef(...) returning null. */
    @Test
    public void testUpdateKafkaServiceDef_updateReturnsNull_noThrow() throws Exception {
        try (MockedStatic<EmbeddedServiceDefsUtil> utilMock = Mockito.mockStatic(EmbeddedServiceDefsUtil.class)) {
            EmbeddedServiceDefsUtil util = Mockito.mock(EmbeddedServiceDefsUtil.class);
            utilMock.when(EmbeddedServiceDefsUtil::instance).thenReturn(util);
            RangerServiceDef embedded = new RangerServiceDef();
            embedded.setResources(new ArrayList<>());
            embedded.setAccessTypes(new ArrayList<>());
            Mockito.when(util.getEmbeddedServiceDef(Mockito.anyString())).thenReturn(embedded);
            PatchForKafkaServiceDefUpdate_J10033 patch = new PatchForKafkaServiceDefUpdate_J10033();
            RangerDaoManager daoMgr = Mockito.mock(RangerDaoManager.class);
            XXServiceDefDao sdefDao = Mockito.mock(XXServiceDefDao.class);
            Mockito.when(daoMgr.getXXServiceDef()).thenReturn(sdefDao);
            XXServiceDef pre = new XXServiceDef();
            pre.setDefOptions("{}");
            Mockito.when(sdefDao.findByName(Mockito.anyString())).thenReturn(pre);
            ServiceDBStore svcDBStore = Mockito.mock(ServiceDBStore.class);
            RangerServiceDef db = new RangerServiceDef();
            db.setResources(new ArrayList<>());
            db.setAccessTypes(new ArrayList<>());
            Mockito.when(svcDBStore.getServiceDefByName(Mockito.anyString())).thenReturn(db);
            RangerValidatorFactory vf = Mockito.mock(RangerValidatorFactory.class);
            Mockito.when(vf.getServiceDefValidator(Mockito.any()))
                    .thenReturn(Mockito.mock(RangerServiceDefValidator.class));
            setIfPresent(patch, "daoMgr", daoMgr);
            setIfPresent(patch, "svcDBStore", svcDBStore);
            setIfPresent(patch, "validatorFactory", vf);
            setIfPresent(patch, "svcStore", svcDBStore);
            Mockito.when(svcDBStore.updateServiceDef(Mockito.any())).thenReturn(null);
            Method m = PatchForKafkaServiceDefUpdate_J10033.class.getDeclaredMethod("updateKafkaServiceDef");
            m.setAccessible(true);
            m.invoke(patch);
        }
    }

    /**
     * main() exit codes are observed by installing a SecurityManager whose checkExit throws the
     * status as a SecurityException message: "0"/"1" accepted on the success path, "1" required
     * on the failure path.
     * NOTE(review): SecurityManager is deprecated for removal (JEP 411) and disallowed by
     * default on newer JDKs — this test will need a different exit-interception strategy there;
     * confirm against the build's target JDK.
     */
    @Test
    public void testMain_Success_ExitZero_andFailureExitOne() {
        SecurityManager originalSm = System.getSecurityManager();
        try (MockedStatic<CLIUtil> cliMock = Mockito.mockStatic(CLIUtil.class)) {
            PatchForKafkaServiceDefUpdate_J10033 loader = new PatchForKafkaServiceDefUpdate_J10033();
            loader.setMoreToProcess(false);
            cliMock.when(() -> CLIUtil.getBean(PatchForKafkaServiceDefUpdate_J10033.class)).thenReturn(loader);
            System.setSecurityManager(new SecurityManager() {
                @Override
                public void checkExit(int status) {
                    throw new SecurityException(String.valueOf(status));
                }

                @Override
                public void checkPermission(Permission perm) {
                }
            });
            try {
                PatchForKafkaServiceDefUpdate_J10033.main(new String[] {});
            } catch (SecurityException se) {
                assertTrue("0".equals(se.getMessage()) || "1".equals(se.getMessage()));
            }
        } finally {
            System.setSecurityManager(originalSm);
        }
        // failure path
        SecurityManager originalSm2 = System.getSecurityManager();
        try (MockedStatic<CLIUtil> cliMock = Mockito.mockStatic(CLIUtil.class)) {
            cliMock.when(() -> CLIUtil.getBean(PatchForKafkaServiceDefUpdate_J10033.class))
                    .thenThrow(new RuntimeException("boom"));
            System.setSecurityManager(new SecurityManager() {
                @Override
                public void checkExit(int status) {
                    throw new SecurityException(String.valueOf(status));
                }

                @Override
                public void checkPermission(Permission perm) {
                }
            });
            try {
                PatchForKafkaServiceDefUpdate_J10033.main(new String[] {});
            } catch (SecurityException se) {
                assertEquals("1", se.getMessage());
            }
        } finally {
            System.setSecurityManager(originalSm2);
        }
    }
}
googleapis/google-cloud-java
37,782
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/ListContentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/content.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * List content request. Returns the BASIC Content view. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListContentRequest} */ public final class ListContentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.ListContentRequest) ListContentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListContentRequest.newBuilder() to construct. 
private ListContentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListContentRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListContentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListContentRequest.class, com.google.cloud.dataplex.v1.ListContentRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Maximum number of content to return. The service may return fewer * than this value. If unspecified, at most 10 content will be returned. The * maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. 
* </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.ListContentRequest)) { return super.equals(obj); } com.google.cloud.dataplex.v1.ListContentRequest other = (com.google.cloud.dataplex.v1.ListContentRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListContentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListContentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListContentRequest 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataplex.v1.ListContentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * List content request. Returns the BASIC Content view. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListContentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.ListContentRequest) com.google.cloud.dataplex.v1.ListContentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListContentRequest.class, com.google.cloud.dataplex.v1.ListContentRequest.Builder.class); } // Construct using com.google.cloud.dataplex.v1.ListContentRequest.newBuilder() private Builder() {} 
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.ContentProto .internal_static_google_cloud_dataplex_v1_ListContentRequest_descriptor; } @java.lang.Override public com.google.cloud.dataplex.v1.ListContentRequest getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.ListContentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.ListContentRequest build() { com.google.cloud.dataplex.v1.ListContentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.ListContentRequest buildPartial() { com.google.cloud.dataplex.v1.ListContentRequest result = new com.google.cloud.dataplex.v1.ListContentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataplex.v1.ListContentRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.ListContentRequest) { return mergeFrom((com.google.cloud.dataplex.v1.ListContentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.ListContentRequest other) { if (other == com.google.cloud.dataplex.v1.ListContentRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { 
pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. 
The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the parent lake: * projects/{project_id}/locations/{location_id}/lakes/{lake_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Maximum number of content to return. The service may return fewer * than this value. If unspecified, at most 10 content will be returned. The * maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. 
Maximum number of content to return. The service may return fewer * than this value. If unspecified, at most 10 content will be returned. The * maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Maximum number of content to return. The service may return fewer * than this value. If unspecified, at most 10 content will be returned. The * maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. 
* </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. Page token received from a previous `ListContent` call. Provide * this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListContent` must match the call that provided the page * token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. 
* @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. 
* </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Filter request. Filters are case-sensitive. * The following formats are supported: * * labels.key1 = "value1" * labels:key1 * type = "NOTEBOOK" * type = "SQL_SCRIPT" * * These restrictions can be coinjoined with AND, OR and NOT conjunctions. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.ListContentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.ListContentRequest) private static final com.google.cloud.dataplex.v1.ListContentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.ListContentRequest(); } public static com.google.cloud.dataplex.v1.ListContentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListContentRequest> PARSER = new com.google.protobuf.AbstractParser<ListContentRequest>() { @java.lang.Override public ListContentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListContentRequest> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListContentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataplex.v1.ListContentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,792
java-developerconnect/proto-google-cloud-developerconnect-v1/src/main/java/com/google/cloud/developerconnect/v1/InstallationState.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/developerconnect/v1/developer_connect.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.developerconnect.v1; /** * * * <pre> * Describes stage and necessary actions to be taken by the * user to complete the installation. Used for GitHub and GitHub Enterprise * based connections. * </pre> * * Protobuf type {@code google.cloud.developerconnect.v1.InstallationState} */ public final class InstallationState extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.developerconnect.v1.InstallationState) InstallationStateOrBuilder { private static final long serialVersionUID = 0L; // Use InstallationState.newBuilder() to construct. 
private InstallationState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InstallationState() { stage_ = 0; message_ = ""; actionUri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InstallationState(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_InstallationState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_InstallationState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.developerconnect.v1.InstallationState.class, com.google.cloud.developerconnect.v1.InstallationState.Builder.class); } /** * * * <pre> * Stage of the installation process. * </pre> * * Protobuf enum {@code google.cloud.developerconnect.v1.InstallationState.Stage} */ public enum Stage implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * No stage specified. * </pre> * * <code>STAGE_UNSPECIFIED = 0;</code> */ STAGE_UNSPECIFIED(0), /** * * * <pre> * Only for GitHub Enterprise. An App creation has been requested. * The user needs to confirm the creation in their GitHub enterprise host. * </pre> * * <code>PENDING_CREATE_APP = 1;</code> */ PENDING_CREATE_APP(1), /** * * * <pre> * User needs to authorize the GitHub (or Enterprise) App via OAuth. * </pre> * * <code>PENDING_USER_OAUTH = 2;</code> */ PENDING_USER_OAUTH(2), /** * * * <pre> * User needs to follow the link to install the GitHub (or Enterprise) App. * </pre> * * <code>PENDING_INSTALL_APP = 3;</code> */ PENDING_INSTALL_APP(3), /** * * * <pre> * Installation process has been completed. 
* </pre> * * <code>COMPLETE = 10;</code> */ COMPLETE(10), UNRECOGNIZED(-1), ; /** * * * <pre> * No stage specified. * </pre> * * <code>STAGE_UNSPECIFIED = 0;</code> */ public static final int STAGE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Only for GitHub Enterprise. An App creation has been requested. * The user needs to confirm the creation in their GitHub enterprise host. * </pre> * * <code>PENDING_CREATE_APP = 1;</code> */ public static final int PENDING_CREATE_APP_VALUE = 1; /** * * * <pre> * User needs to authorize the GitHub (or Enterprise) App via OAuth. * </pre> * * <code>PENDING_USER_OAUTH = 2;</code> */ public static final int PENDING_USER_OAUTH_VALUE = 2; /** * * * <pre> * User needs to follow the link to install the GitHub (or Enterprise) App. * </pre> * * <code>PENDING_INSTALL_APP = 3;</code> */ public static final int PENDING_INSTALL_APP_VALUE = 3; /** * * * <pre> * Installation process has been completed. * </pre> * * <code>COMPLETE = 10;</code> */ public static final int COMPLETE_VALUE = 10; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static Stage valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static Stage forNumber(int value) { switch (value) { case 0: return STAGE_UNSPECIFIED; case 1: return PENDING_CREATE_APP; case 2: return PENDING_USER_OAUTH; case 3: return PENDING_INSTALL_APP; case 10: return COMPLETE; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<Stage> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<Stage> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<Stage>() { public Stage findValueByNumber(int number) { return Stage.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.developerconnect.v1.InstallationState.getDescriptor() .getEnumTypes() .get(0); } private static final Stage[] VALUES = values(); public static Stage valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private Stage(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.developerconnect.v1.InstallationState.Stage) } public static final int STAGE_FIELD_NUMBER = 1; private int stage_ = 0; /** * * * <pre> * Output only. Current step of the installation process. 
* </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for stage. */ @java.lang.Override public int getStageValue() { return stage_; } /** * * * <pre> * Output only. Current step of the installation process. * </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The stage. */ @java.lang.Override public com.google.cloud.developerconnect.v1.InstallationState.Stage getStage() { com.google.cloud.developerconnect.v1.InstallationState.Stage result = com.google.cloud.developerconnect.v1.InstallationState.Stage.forNumber(stage_); return result == null ? com.google.cloud.developerconnect.v1.InstallationState.Stage.UNRECOGNIZED : result; } public static final int MESSAGE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object message_ = ""; /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The message. */ @java.lang.Override public java.lang.String getMessage() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } } /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for message. 
*/ @java.lang.Override public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ACTION_URI_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object actionUri_ = ""; /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. * </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The actionUri. */ @java.lang.Override public java.lang.String getActionUri() { java.lang.Object ref = actionUri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); actionUri_ = s; return s; } } /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. * </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for actionUri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getActionUriBytes() { java.lang.Object ref = actionUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); actionUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (stage_ != com.google.cloud.developerconnect.v1.InstallationState.Stage.STAGE_UNSPECIFIED .getNumber()) { output.writeEnum(1, stage_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionUri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, actionUri_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (stage_ != com.google.cloud.developerconnect.v1.InstallationState.Stage.STAGE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, stage_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionUri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, actionUri_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if 
(!(obj instanceof com.google.cloud.developerconnect.v1.InstallationState)) { return super.equals(obj); } com.google.cloud.developerconnect.v1.InstallationState other = (com.google.cloud.developerconnect.v1.InstallationState) obj; if (stage_ != other.stage_) return false; if (!getMessage().equals(other.getMessage())) return false; if (!getActionUri().equals(other.getActionUri())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + STAGE_FIELD_NUMBER; hash = (53 * hash) + stage_; hash = (37 * hash) + MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getMessage().hashCode(); hash = (37 * hash) + ACTION_URI_FIELD_NUMBER; hash = (53 * hash) + getActionUri().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.developerconnect.v1.InstallationState parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.developerconnect.v1.InstallationState parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.InstallationState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.developerconnect.v1.InstallationState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.developerconnect.v1.InstallationState prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Describes stage and necessary actions to be taken by the * user to complete the installation. Used for GitHub and GitHub Enterprise * based connections. * </pre> * * Protobuf type {@code google.cloud.developerconnect.v1.InstallationState} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.developerconnect.v1.InstallationState) com.google.cloud.developerconnect.v1.InstallationStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_InstallationState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_InstallationState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.developerconnect.v1.InstallationState.class, com.google.cloud.developerconnect.v1.InstallationState.Builder.class); } // Construct using 
com.google.cloud.developerconnect.v1.InstallationState.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; stage_ = 0; message_ = ""; actionUri_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.developerconnect.v1.DeveloperConnectProto .internal_static_google_cloud_developerconnect_v1_InstallationState_descriptor; } @java.lang.Override public com.google.cloud.developerconnect.v1.InstallationState getDefaultInstanceForType() { return com.google.cloud.developerconnect.v1.InstallationState.getDefaultInstance(); } @java.lang.Override public com.google.cloud.developerconnect.v1.InstallationState build() { com.google.cloud.developerconnect.v1.InstallationState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.developerconnect.v1.InstallationState buildPartial() { com.google.cloud.developerconnect.v1.InstallationState result = new com.google.cloud.developerconnect.v1.InstallationState(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.developerconnect.v1.InstallationState result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.stage_ = stage_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.message_ = message_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.actionUri_ = actionUri_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.developerconnect.v1.InstallationState) { return mergeFrom((com.google.cloud.developerconnect.v1.InstallationState) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.developerconnect.v1.InstallationState other) { if (other == com.google.cloud.developerconnect.v1.InstallationState.getDefaultInstance()) return this; if (other.stage_ != 0) { setStageValue(other.getStageValue()); } if (!other.getMessage().isEmpty()) { message_ = other.message_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getActionUri().isEmpty()) { actionUri_ = other.actionUri_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { stage_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 
18: { message_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { actionUri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int stage_ = 0; /** * * * <pre> * Output only. Current step of the installation process. * </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for stage. */ @java.lang.Override public int getStageValue() { return stage_; } /** * * * <pre> * Output only. Current step of the installation process. * </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for stage to set. * @return This builder for chaining. */ public Builder setStageValue(int value) { stage_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. Current step of the installation process. * </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The stage. */ @java.lang.Override public com.google.cloud.developerconnect.v1.InstallationState.Stage getStage() { com.google.cloud.developerconnect.v1.InstallationState.Stage result = com.google.cloud.developerconnect.v1.InstallationState.Stage.forNumber(stage_); return result == null ? com.google.cloud.developerconnect.v1.InstallationState.Stage.UNRECOGNIZED : result; } /** * * * <pre> * Output only. 
Current step of the installation process. * </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The stage to set. * @return This builder for chaining. */ public Builder setStage(com.google.cloud.developerconnect.v1.InstallationState.Stage value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; stage_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. Current step of the installation process. * </pre> * * <code> * .google.cloud.developerconnect.v1.InstallationState.Stage stage = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearStage() { bitField0_ = (bitField0_ & ~0x00000001); stage_ = 0; onChanged(); return this; } private java.lang.Object message_ = ""; /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The message. */ public java.lang.String getMessage() { java.lang.Object ref = message_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); message_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for message. 
*/ public com.google.protobuf.ByteString getMessageBytes() { java.lang.Object ref = message_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); message_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The message to set. * @return This builder for chaining. */ public Builder setMessage(java.lang.String value) { if (value == null) { throw new NullPointerException(); } message_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearMessage() { message_ = getDefaultInstance().getMessage(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Output only. Message of what the user should do next to continue the * installation. Empty string if the installation is already complete. * </pre> * * <code>string message = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for message to set. * @return This builder for chaining. */ public Builder setMessageBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); message_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object actionUri_ = ""; /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. 
* </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The actionUri. */ public java.lang.String getActionUri() { java.lang.Object ref = actionUri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); actionUri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. * </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for actionUri. */ public com.google.protobuf.ByteString getActionUriBytes() { java.lang.Object ref = actionUri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); actionUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. * </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The actionUri to set. * @return This builder for chaining. */ public Builder setActionUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } actionUri_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. * </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearActionUri() { actionUri_ = getDefaultInstance().getActionUri(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Output only. Link to follow for next action. Empty string if the * installation is already complete. 
* </pre> * * <code>string action_uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for actionUri to set. * @return This builder for chaining. */ public Builder setActionUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); actionUri_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.developerconnect.v1.InstallationState) } // @@protoc_insertion_point(class_scope:google.cloud.developerconnect.v1.InstallationState) private static final com.google.cloud.developerconnect.v1.InstallationState DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.developerconnect.v1.InstallationState(); } public static com.google.cloud.developerconnect.v1.InstallationState getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InstallationState> PARSER = new com.google.protobuf.AbstractParser<InstallationState>() { @java.lang.Override public InstallationState parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { 
throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<InstallationState> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<InstallationState> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.developerconnect.v1.InstallationState getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,844
java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/BigQueryDateShardedSpec.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1/table_spec.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1; /** * * * <pre> * Specification for a group of BigQuery tables with the `[prefix]YYYYMMDD` name * pattern. * * For more information, see [Introduction to partitioned tables] * (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.BigQueryDateShardedSpec} */ public final class BigQueryDateShardedSpec extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.BigQueryDateShardedSpec) BigQueryDateShardedSpecOrBuilder { private static final long serialVersionUID = 0L; // Use BigQueryDateShardedSpec.newBuilder() to construct. 
private BigQueryDateShardedSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BigQueryDateShardedSpec() { dataset_ = ""; tablePrefix_ = ""; latestShardResource_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BigQueryDateShardedSpec(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.TableSpecOuterClass .internal_static_google_cloud_datacatalog_v1_BigQueryDateShardedSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.TableSpecOuterClass .internal_static_google_cloud_datacatalog_v1_BigQueryDateShardedSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.class, com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.Builder.class); } public static final int DATASET_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object dataset_ = ""; /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The dataset. */ @java.lang.Override public java.lang.String getDataset() { java.lang.Object ref = dataset_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataset_ = s; return s; } } /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. 
For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for dataset. */ @java.lang.Override public com.google.protobuf.ByteString getDatasetBytes() { java.lang.Object ref = dataset_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataset_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TABLE_PREFIX_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object tablePrefix_ = ""; /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The tablePrefix. */ @java.lang.Override public java.lang.String getTablePrefix() { java.lang.Object ref = tablePrefix_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tablePrefix_ = s; return s; } } /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for tablePrefix. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTablePrefixBytes() { java.lang.Object ref = tablePrefix_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tablePrefix_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SHARD_COUNT_FIELD_NUMBER = 3; private long shardCount_ = 0L; /** * * * <pre> * Output only. Total number of shards. * </pre> * * <code>int64 shard_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The shardCount. */ @java.lang.Override public long getShardCount() { return shardCount_; } public static final int LATEST_SHARD_RESOURCE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object latestShardResource_ = ""; /** * * * <pre> * Output only. BigQuery resource name of the latest shard. * </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The latestShardResource. */ @java.lang.Override public java.lang.String getLatestShardResource() { java.lang.Object ref = latestShardResource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); latestShardResource_ = s; return s; } } /** * * * <pre> * Output only. BigQuery resource name of the latest shard. * </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for latestShardResource. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLatestShardResourceBytes() { java.lang.Object ref = latestShardResource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); latestShardResource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataset_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, dataset_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tablePrefix_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, tablePrefix_); } if (shardCount_ != 0L) { output.writeInt64(3, shardCount_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(latestShardResource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, latestShardResource_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(dataset_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, dataset_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tablePrefix_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, tablePrefix_); } if (shardCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, shardCount_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(latestShardResource_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, latestShardResource_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec)) { return super.equals(obj); } com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec other = (com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec) obj; if (!getDataset().equals(other.getDataset())) return false; if (!getTablePrefix().equals(other.getTablePrefix())) return false; if (getShardCount() != other.getShardCount()) return false; if (!getLatestShardResource().equals(other.getLatestShardResource())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DATASET_FIELD_NUMBER; hash = (53 * hash) + getDataset().hashCode(); hash = (37 * hash) + TABLE_PREFIX_FIELD_NUMBER; hash = (53 * hash) + getTablePrefix().hashCode(); hash = (37 * hash) + SHARD_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getShardCount()); hash = (37 * hash) + LATEST_SHARD_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getLatestShardResource().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Specification for a group of BigQuery tables with the `[prefix]YYYYMMDD` name * pattern. * * For more information, see [Introduction to partitioned tables] * (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). 
* </pre> * * Protobuf type {@code google.cloud.datacatalog.v1.BigQueryDateShardedSpec} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.BigQueryDateShardedSpec) com.google.cloud.datacatalog.v1.BigQueryDateShardedSpecOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1.TableSpecOuterClass .internal_static_google_cloud_datacatalog_v1_BigQueryDateShardedSpec_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1.TableSpecOuterClass .internal_static_google_cloud_datacatalog_v1_BigQueryDateShardedSpec_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.class, com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.Builder.class); } // Construct using com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; dataset_ = ""; tablePrefix_ = ""; shardCount_ = 0L; latestShardResource_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1.TableSpecOuterClass .internal_static_google_cloud_datacatalog_v1_BigQueryDateShardedSpec_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec build() { com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec result = 
buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec buildPartial() { com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec result = new com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.dataset_ = dataset_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.tablePrefix_ = tablePrefix_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.shardCount_ = shardCount_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.latestShardResource_ = latestShardResource_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec) { return 
mergeFrom((com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec other) { if (other == com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec.getDefaultInstance()) return this; if (!other.getDataset().isEmpty()) { dataset_ = other.dataset_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getTablePrefix().isEmpty()) { tablePrefix_ = other.tablePrefix_; bitField0_ |= 0x00000002; onChanged(); } if (other.getShardCount() != 0L) { setShardCount(other.getShardCount()); } if (!other.getLatestShardResource().isEmpty()) { latestShardResource_ = other.latestShardResource_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { dataset_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { tablePrefix_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { shardCount_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { latestShardResource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return 
this; } private int bitField0_; private java.lang.Object dataset_ = ""; /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The dataset. */ public java.lang.String getDataset() { java.lang.Object ref = dataset_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); dataset_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for dataset. */ public com.google.protobuf.ByteString getDatasetBytes() { java.lang.Object ref = dataset_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); dataset_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The dataset to set. * @return This builder for chaining. 
*/ public Builder setDataset(java.lang.String value) { if (value == null) { throw new NullPointerException(); } dataset_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearDataset() { dataset_ = getDefaultInstance().getDataset(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Output only. The Data Catalog resource name of the dataset entry the * current table belongs to. For example: * * `projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`. * </pre> * * <code> * string dataset = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for dataset to set. * @return This builder for chaining. */ public Builder setDatasetBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); dataset_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object tablePrefix_ = ""; /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The tablePrefix. 
*/ public java.lang.String getTablePrefix() { java.lang.Object ref = tablePrefix_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tablePrefix_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for tablePrefix. */ public com.google.protobuf.ByteString getTablePrefixBytes() { java.lang.Object ref = tablePrefix_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tablePrefix_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The tablePrefix to set. * @return This builder for chaining. */ public Builder setTablePrefix(java.lang.String value) { if (value == null) { throw new NullPointerException(); } tablePrefix_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. 
*/ public Builder clearTablePrefix() { tablePrefix_ = getDefaultInstance().getTablePrefix(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Output only. The table name prefix of the shards. * * The name of any given shard is `[table_prefix]YYYYMMDD`. * For example, for the `MyTable20180101` shard, the * `table_prefix` is `MyTable`. * </pre> * * <code>string table_prefix = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for tablePrefix to set. * @return This builder for chaining. */ public Builder setTablePrefixBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); tablePrefix_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private long shardCount_; /** * * * <pre> * Output only. Total number of shards. * </pre> * * <code>int64 shard_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The shardCount. */ @java.lang.Override public long getShardCount() { return shardCount_; } /** * * * <pre> * Output only. Total number of shards. * </pre> * * <code>int64 shard_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The shardCount to set. * @return This builder for chaining. */ public Builder setShardCount(long value) { shardCount_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Total number of shards. * </pre> * * <code>int64 shard_count = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearShardCount() { bitField0_ = (bitField0_ & ~0x00000004); shardCount_ = 0L; onChanged(); return this; } private java.lang.Object latestShardResource_ = ""; /** * * * <pre> * Output only. BigQuery resource name of the latest shard. 
* </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The latestShardResource. */ public java.lang.String getLatestShardResource() { java.lang.Object ref = latestShardResource_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); latestShardResource_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. BigQuery resource name of the latest shard. * </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for latestShardResource. */ public com.google.protobuf.ByteString getLatestShardResourceBytes() { java.lang.Object ref = latestShardResource_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); latestShardResource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. BigQuery resource name of the latest shard. * </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The latestShardResource to set. * @return This builder for chaining. */ public Builder setLatestShardResource(java.lang.String value) { if (value == null) { throw new NullPointerException(); } latestShardResource_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Output only. BigQuery resource name of the latest shard. * </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearLatestShardResource() { latestShardResource_ = getDefaultInstance().getLatestShardResource(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Output only. 
BigQuery resource name of the latest shard. * </pre> * * <code>string latest_shard_resource = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for latestShardResource to set. * @return This builder for chaining. */ public Builder setLatestShardResourceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); latestShardResource_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.BigQueryDateShardedSpec) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.BigQueryDateShardedSpec) private static final com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec(); } public static com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BigQueryDateShardedSpec> PARSER = new com.google.protobuf.AbstractParser<BigQueryDateShardedSpec>() { @java.lang.Override public BigQueryDateShardedSpec parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BigQueryDateShardedSpec> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BigQueryDateShardedSpec> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/helix
38,413
helix-core/src/main/java/org/apache/helix/controller/rebalancer/waged/model/ClusterModelProvider.java
package org.apache.helix.controller.rebalancer.waged.model;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.helix.HelixConstants;
import org.apache.helix.HelixException;
import org.apache.helix.controller.dataproviders.ResourceControllerDataProvider;
import org.apache.helix.controller.rebalancer.util.DelayedRebalanceUtil;
import org.apache.helix.model.ClusterConfig;
import org.apache.helix.model.ClusterTopologyConfig;
import org.apache.helix.model.IdealState;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.Partition;
import org.apache.helix.model.Resource;
import org.apache.helix.model.ResourceAssignment;
import org.apache.helix.model.ResourceConfig;
import org.apache.helix.model.StateModelDefinition;

/**
 * This util class generates Cluster Model object based on the controller's data cache.
 */
public class ClusterModelProvider {

  /**
   * The scope of replicas that a rebalance calculation is allowed to move.
   */
  private enum RebalanceScopeType {
    // Set the rebalance scope to cover the difference between the current assignment and the
    // Baseline assignment only.
    PARTIAL,
    // Set the rebalance scope to cover all replicas that need relocation based on the cluster
    // changes.
    GLOBAL_BASELINE,
    // Set the rebalance scope to cover only replicas that are assigned to downed instances.
    EMERGENCY,
    // A temporary overwrites for partition replicas on downed instance but still within the delayed window but missing
    // minActiveReplicas
    DELAYED_REBALANCE_OVERWRITES
  }

  /**
   * TODO: On integration with WAGED, have to integrate with counter and latency metrics -- qqu
   * Compute a new Cluster Model with scope limited to partitions with best possible assignment missing minActiveReplicas
   * because of delayed rebalance setting.
   * @param dataProvider The controller's data cache
   * @param resourceMap The full map of the resource by name
   * @param activeInstances The active instances that will be used in the calculation.
   * @param resourceAssignment The resource assignment state to compute on. This should be the current state assignment;
   *                           if it's run right after another rebalance calculation, the best possible assignment from
   *                           previous result can be used.
   * @return the ClusterModel
   */
  public static ClusterModel generateClusterModelForDelayedRebalanceOverwrites(
      ResourceControllerDataProvider dataProvider,
      Map<String, Resource> resourceMap,
      Set<String> activeInstances,
      Map<String, ResourceAssignment> resourceAssignment) {
    return generateClusterModel(dataProvider, resourceMap, activeInstances, Collections.emptyMap(),
        Collections.emptyMap(), resourceAssignment,
        RebalanceScopeType.DELAYED_REBALANCE_OVERWRITES);
  }

  /**
   * Generate a new Cluster Model object according to the current cluster status for emergency
   * rebalance. The rebalance scope is configured for recovering replicas that are on permanently
   * downed nodes
   * @param dataProvider           The controller's data cache.
   * @param resourceMap            The full list of the resources to be rebalanced. Note that any
   *                               resources that are not in this list will be removed from the
   *                               final assignment.
   * @param activeInstances        The active instances that will be used in the calculation.
   *                               Note this list can be different from the real active node list
   *                               according to the rebalancer logic.
   * @param bestPossibleAssignment The persisted Best Possible assignment that was generated in the
   *                               previous rebalance.
   * @return the new cluster model
   */
  public static ClusterModel generateClusterModelForEmergencyRebalance(
      ResourceControllerDataProvider dataProvider, Map<String, Resource> resourceMap,
      Set<String> activeInstances, Map<String, ResourceAssignment> bestPossibleAssignment) {
    return generateClusterModel(dataProvider, resourceMap, activeInstances, Collections.emptyMap(),
        Collections.emptyMap(), bestPossibleAssignment, RebalanceScopeType.EMERGENCY);
  }

  /**
   * Generate a new Cluster Model object according to the current cluster status for partial
   * rebalance. The rebalance scope is configured for recovering the missing replicas that are in
   * the Baseline assignment but not in the current Best possible assignment only.
   * @param dataProvider           The controller's data cache.
   * @param resourceMap            The full list of the resources to be rebalanced. Note that any
   *                               resources that are not in this list will be removed from the
   *                               final assignment.
   * @param activeInstances        The active instances that will be used in the calculation.
   *                               Note this list can be different from the real active node list
   *                               according to the rebalancer logic.
   * @param baselineAssignment     The persisted Baseline assignment.
   * @param bestPossibleAssignment The persisted Best Possible assignment that was generated in the
   *                               previous rebalance.
   * @return the new cluster model
   */
  public static ClusterModel generateClusterModelForPartialRebalance(
      ResourceControllerDataProvider dataProvider, Map<String, Resource> resourceMap,
      Set<String> activeInstances, Map<String, ResourceAssignment> baselineAssignment,
      Map<String, ResourceAssignment> bestPossibleAssignment) {
    return generateClusterModel(dataProvider, resourceMap, activeInstances, Collections.emptyMap(),
        baselineAssignment, bestPossibleAssignment, RebalanceScopeType.PARTIAL);
  }

  /**
   * Generate a new Cluster Model object according to the current cluster status for the Baseline
   * calculation. The rebalance scope is determined according to the cluster changes.
   * @param dataProvider       The controller's data cache.
   * @param resourceMap        The full list of the resources to be rebalanced. Note that any
   *                           resources that are not in this list will be removed from the
   *                           final assignment.
   * @param allInstances       All the instances that will be used in the calculation.
   * @param clusterChanges     All the cluster changes that happened after the previous rebalance.
   * @param baselineAssignment The previous Baseline assignment.
   * @return the new cluster model
   */
  public static ClusterModel generateClusterModelForBaseline(
      ResourceControllerDataProvider dataProvider, Map<String, Resource> resourceMap,
      Set<String> allInstances, Map<HelixConstants.ChangeType, Set<String>> clusterChanges,
      Map<String, ResourceAssignment> baselineAssignment) {
    return generateClusterModel(dataProvider, resourceMap, allInstances, clusterChanges,
        Collections.emptyMap(), baselineAssignment, RebalanceScopeType.GLOBAL_BASELINE);
  }

  /**
   * Generate a cluster model based on the current state output and data cache. The rebalance scope
   * is configured for recovering the missing replicas only.
   * @param dataProvider           The controller's data cache.
   * @param resourceMap            The full list of the resources to be rebalanced. Note that any
   *                               resources that are not in this list will be removed from the
   *                               final assignment.
   * @param currentStateAssignment The resource assignment built from current state output.
   * @return the new cluster model
   */
  public static ClusterModel generateClusterModelFromExistingAssignment(
      ResourceControllerDataProvider dataProvider, Map<String, Resource> resourceMap,
      Map<String, ResourceAssignment> currentStateAssignment) {
    return generateClusterModel(dataProvider, resourceMap, dataProvider.getEnabledLiveInstances(),
        Collections.emptyMap(), Collections.emptyMap(), currentStateAssignment,
        RebalanceScopeType.GLOBAL_BASELINE);
  }

  /**
   * Generate a new Cluster Model object according to the current cluster status.
   * @param dataProvider      The controller's data cache.
   * @param resourceMap       The full list of the resources to be rebalanced. Note that any
   *                          resources that are not in this list will be removed from the
   *                          final assignment.
   * @param activeInstances   The active instances that will be used in the calculation.
   *                          Note this list can be different from the real active node list
   *                          according to the rebalancer logic.
   * @param clusterChanges    All the cluster changes that happened after the previous rebalance.
   * @param idealAssignment   The ideal assignment.
   * @param currentAssignment The current assignment that was generated in the previous rebalance.
   * @param scopeType         Specify how to determine the rebalance scope.
   * @return the new cluster model
   */
  private static ClusterModel generateClusterModel(ResourceControllerDataProvider dataProvider,
      Map<String, Resource> resourceMap, Set<String> activeInstances,
      Map<HelixConstants.ChangeType, Set<String>> clusterChanges,
      Map<String, ResourceAssignment> idealAssignment,
      Map<String, ResourceAssignment> currentAssignment, RebalanceScopeType scopeType) {
    Map<String, InstanceConfig> assignableInstanceConfigMap =
        dataProvider.getAssignableInstanceConfigMap();
    // Construct all the assignable nodes and initialize with the allocated replicas.
    Set<AssignableNode> assignableNodes =
        getAllAssignableNodes(dataProvider.getClusterConfig(), assignableInstanceConfigMap,
            activeInstances);

    // Generate the logical view of the ideal assignment and the current assignment.
    ClusterTopologyConfig clusterTopologyConfig =
        ClusterTopologyConfig.createFromClusterConfig(dataProvider.getClusterConfig());
    Map<String, ResourceAssignment> logicalIdIdealAssignment =
        idealAssignment.isEmpty() ? idealAssignment
            : generateResourceAssignmentMapLogicalIdView(idealAssignment, clusterTopologyConfig,
                dataProvider);
    Map<String, ResourceAssignment> logicalIdCurrentAssignment =
        currentAssignment.isEmpty() ? currentAssignment
            : generateResourceAssignmentMapLogicalIdView(currentAssignment, clusterTopologyConfig,
                dataProvider);

    // Get the set of active logical ids.
    Set<String> activeLogicalIds = activeInstances.stream().map(
        instanceName -> assignableInstanceConfigMap
            .getOrDefault(instanceName, new InstanceConfig(instanceName))
            .getLogicalId(clusterTopologyConfig.getEndNodeType())).collect(Collectors.toSet());

    // TODO: Figure out why streaming the keySet directly in rare cases causes ConcurrentModificationException
    // In theory, this should not be happening since cache refresh is at beginning of the pipeline, so could be some other reason.
    // For now, we just copy the keySet to a new HashSet to avoid the exception.
    Set<String> assignableLiveInstanceNames =
        new HashSet<>(dataProvider.getAssignableLiveInstances().keySet());
    Set<String> assignableLiveInstanceLogicalIds = assignableLiveInstanceNames.stream().map(
        instanceName -> assignableInstanceConfigMap
            .getOrDefault(instanceName, new InstanceConfig(instanceName))
            .getLogicalId(clusterTopologyConfig.getEndNodeType())).collect(Collectors.toSet());

    // Generate replica objects for all the resource partitions.
    // <resource, replica set>
    Map<String, Set<AssignableReplica>> replicaMap =
        getAllAssignableReplicas(dataProvider, resourceMap, assignableNodes);

    // Check if the replicas need to be reassigned.
    Map<String, Set<AssignableReplica>> allocatedReplicas = new HashMap<>();
    // <instanceName, replica set>
    Set<AssignableReplica> toBeAssignedReplicas;
    switch (scopeType) {
      case GLOBAL_BASELINE:
        toBeAssignedReplicas = findToBeAssignedReplicasByClusterChanges(replicaMap, activeLogicalIds,
            assignableLiveInstanceLogicalIds, clusterChanges, logicalIdCurrentAssignment,
            allocatedReplicas);
        break;
      case PARTIAL:
        // Filter to remove the replicas that do not exist in the ideal assignment given but exist
        // in the replicaMap. This is because such replicas are new additions that do not need to be
        // rebalanced right away.
        retainExistingReplicas(replicaMap, logicalIdIdealAssignment);
        toBeAssignedReplicas =
            findToBeAssignedReplicasByComparingWithIdealAssignment(replicaMap, activeLogicalIds,
                logicalIdIdealAssignment, logicalIdCurrentAssignment, allocatedReplicas);
        break;
      case EMERGENCY:
        toBeAssignedReplicas = findToBeAssignedReplicasOnDownInstances(replicaMap, activeLogicalIds,
            logicalIdCurrentAssignment, allocatedReplicas);
        break;
      case DELAYED_REBALANCE_OVERWRITES:
        toBeAssignedReplicas = DelayedRebalanceUtil.findToBeAssignedReplicasForMinActiveReplica(
            dataProvider, replicaMap.keySet(), activeLogicalIds, logicalIdCurrentAssignment,
            allocatedReplicas);
        break;
      default:
        throw new HelixException("Unknown rebalance scope type: " + scopeType);
    }

    // Update the allocated replicas to the assignable nodes.
    assignableNodes.parallelStream().forEach(node -> node.assignInitBatch(
        allocatedReplicas.getOrDefault(node.getLogicalId(), Collections.emptySet())));

    // Construct and initialize cluster context.
    ClusterContext context = new ClusterContext(
        replicaMap.values().stream().flatMap(Set::stream).collect(Collectors.toSet()),
        assignableNodes, logicalIdIdealAssignment, logicalIdCurrentAssignment,
        dataProvider.getClusterConfig());

    // Initialize the cluster context with the allocated assignments.
    context.setAssignmentForFaultZoneMap(mapAssignmentToFaultZone(assignableNodes));

    return new ClusterModel(context, toBeAssignedReplicas, assignableNodes);
  }

  /**
   * Convert an instance-name-keyed assignment map into a logical-id-keyed view. If two instances
   * share the same logical id, the replica in the top state wins for that logical id.
   * @param resourceAssignmentMap The assignment keyed by instance name.
   * @param clusterTopologyConfig The topology config used to resolve logical ids.
   * @param dataProvider          The controller's data cache.
   * @return The same assignments re-keyed by logical id.
   */
  private static Map<String, ResourceAssignment> generateResourceAssignmentMapLogicalIdView(
      Map<String, ResourceAssignment> resourceAssignmentMap,
      ClusterTopologyConfig clusterTopologyConfig, ResourceControllerDataProvider dataProvider) {
    Map<String, InstanceConfig> allInstanceConfigMap = dataProvider.getInstanceConfigMap();

    return resourceAssignmentMap.entrySet().parallelStream()
        .collect(Collectors.toMap(Map.Entry::getKey, entry -> {
          String resourceName = entry.getKey();
          ResourceAssignment instanceNameResourceAssignment = entry.getValue();
          ResourceAssignment logicalIdResourceAssignment = new ResourceAssignment(resourceName);

          StateModelDefinition stateModelDefinition = dataProvider.getStateModelDef(
              dataProvider.getIdealState(resourceName).getStateModelDefRef());

          instanceNameResourceAssignment.getMappedPartitions().forEach(partition -> {
            Map<String, String> logicalIdStateMap = new HashMap<>();

            instanceNameResourceAssignment.getReplicaMap(partition)
                .forEach((instanceName, state) -> {
                  if (allInstanceConfigMap.containsKey(instanceName)) {
                    String logicalId = allInstanceConfigMap.get(instanceName)
                        .getLogicalId(clusterTopologyConfig.getEndNodeType());
                    // Prefer the top-state replica when multiple instances map to one logical id.
                    if (!logicalIdStateMap.containsKey(logicalId) || state.equals(
                        stateModelDefinition.getTopState())) {
                      logicalIdStateMap.put(logicalId, state);
                    }
                  }
                });

            logicalIdResourceAssignment.addReplicaMap(partition, logicalIdStateMap);
          });

          return logicalIdResourceAssignment;
        }));
  }

  /**
   * Filter the replicas map so only the replicas that have been allocated in the existing
   * assignmentMap remain in the map.
   * @param replicaMap    A map contains all the replicas grouped by resource name; mutated in place.
   * @param assignmentMap The existing assignment to filter against.
   */
  private static void retainExistingReplicas(Map<String, Set<AssignableReplica>> replicaMap,
      Map<String, ResourceAssignment> assignmentMap) {
    replicaMap.entrySet().parallelStream().forEach(replicaSetEntry -> {
      // <partition, <state, instances set>>
      Map<String, Map<String, Set<String>>> stateInstanceMap =
          getStateInstanceMap(assignmentMap.get(replicaSetEntry.getKey()));
      // Iterate the replicas of the resource to find the ones that require reallocating.
      Iterator<AssignableReplica> replicaIter = replicaSetEntry.getValue().iterator();
      while (replicaIter.hasNext()) {
        AssignableReplica replica = replicaIter.next();
        Set<String> validInstances =
            stateInstanceMap.getOrDefault(replica.getPartitionName(), Collections.emptyMap())
                .getOrDefault(replica.getReplicaState(), Collections.emptySet());
        if (validInstances.isEmpty()) {
          // Removing the replica if it is not known in the assignment map.
          replicaIter.remove();
        } else {
          // Remove the instance from the state map record after processing so it won't be
          // double-processed as we loop through all replica
          validInstances.remove(validInstances.iterator().next());
        }
      }
    });
  }

  /**
   * Find the minimum set of replicas that need to be reassigned by comparing the current assignment
   * with the ideal assignment.
   * A replica needs to be reassigned or newly assigned if either of the following conditions is true:
   * 1. The partition allocation (the instance the replica is placed on) in the ideal assignment and
   * the current assignment are different. And the allocation in the ideal assignment is valid.
   * So it is worthwhile to move it.
   * 2. The partition allocation is in neither the ideal assignment nor the current assignment. Or
   * those allocations are not valid due to offline or disabled instances.
   * Otherwise, the rebalancer just keeps the current assignment allocation.
   *
   * @param replicaMap        A map contains all the replicas grouped by resource name.
   * @param activeInstances   All the instances that are live and enabled according to the delay rebalance configuration.
   * @param idealAssignment   The ideal assignment.
   * @param currentAssignment The current assignment that was generated in the previous rebalance.
   * @param allocatedReplicas A map of <Instance -> replicas> to return the allocated replicas grouped by the target instance name.
   * @return The replicas that need to be reassigned.
   */
  private static Set<AssignableReplica> findToBeAssignedReplicasByComparingWithIdealAssignment(
      Map<String, Set<AssignableReplica>> replicaMap, Set<String> activeInstances,
      Map<String, ResourceAssignment> idealAssignment,
      Map<String, ResourceAssignment> currentAssignment,
      Map<String, Set<AssignableReplica>> allocatedReplicas) {
    Set<AssignableReplica> toBeAssignedReplicas = new HashSet<>();
    // check each resource to identify the allocated replicas and to-be-assigned replicas.
    for (String resourceName : replicaMap.keySet()) {
      // <partition, <state, instances set>>
      Map<String, Map<String, Set<String>>> idealPartitionStateMap =
          getValidStateInstanceMap(idealAssignment.get(resourceName), activeInstances);
      Map<String, Map<String, Set<String>>> currentPartitionStateMap =
          getValidStateInstanceMap(currentAssignment.get(resourceName), activeInstances);
      // Iterate the replicas of the resource to find the ones that require reallocating.
      for (AssignableReplica replica : replicaMap.get(resourceName)) {
        String partitionName = replica.getPartitionName();
        String replicaState = replica.getReplicaState();
        Set<String> idealAllocations =
            idealPartitionStateMap.getOrDefault(partitionName, Collections.emptyMap())
                .getOrDefault(replicaState, Collections.emptySet());
        Set<String> currentAllocations =
            currentPartitionStateMap.getOrDefault(partitionName, Collections.emptyMap())
                .getOrDefault(replicaState, Collections.emptySet());

        // Compare the current assignments with the ideal assignment for the common part.
        List<String> commonAllocations = new ArrayList<>(currentAllocations);
        commonAllocations.retainAll(idealAllocations);
        if (!commonAllocations.isEmpty()) {
          // 1. If the partition is allocated at the same location in both ideal and current
          // assignments, there is no need to reassign it.
          String allocatedInstance = commonAllocations.get(0);
          allocatedReplicas.computeIfAbsent(allocatedInstance, key -> new HashSet<>()).add(replica);
          // Remove the instance from the record to prevent this instance from being processed twice.
          idealAllocations.remove(allocatedInstance);
          currentAllocations.remove(allocatedInstance);
        } else if (!idealAllocations.isEmpty()) {
          // 2. If the partition is allocated at an active instance in the ideal assignment but the
          // same allocation does not exist in the current assignment, try to rebalance the replica
          // or assign it if the replica has not been assigned.
          // There are two possible conditions,
          // * This replica has been newly added and has not been assigned yet, so it appears in
          // the ideal assignment and does not appear in the current assignment.
          // * The allocation of this replica in the ideal assignment has been updated due to a
          // cluster change. For example, new instance is added. So the old allocation in the
          // current assignment might be sub-optimal.
          // In either condition, we add it to toBeAssignedReplicas so that it will get assigned.
          toBeAssignedReplicas.add(replica);
          // Remove the pending allocation from the idealAllocations after processing so that the
          // instance won't be double-processed as we loop through all replicas
          String pendingAllocation = idealAllocations.iterator().next();
          idealAllocations.remove(pendingAllocation);
        } else if (!currentAllocations.isEmpty()) {
          // 3. This replica exists in the current assignment but does not appear or has a valid
          // allocation in the ideal assignment.
          // This means either 1) that the ideal assignment actually has this replica allocated on
          // this instance, but it does not show up because the instance is temporarily offline or
          // disabled (note that all such instances have been filtered out in earlier part of the
          // logic) or that the most recent version of the ideal assignment was not fetched
          // correctly from the assignment metadata store.
          // In either case, the solution is to keep the current assignment. So put this replica
          // with the allocated instance into the allocatedReplicas map.
          String allocatedInstance = currentAllocations.iterator().next();
          allocatedReplicas.computeIfAbsent(allocatedInstance, key -> new HashSet<>()).add(replica);
          // Remove the instance from the record to prevent the same location being processed again.
          currentAllocations.remove(allocatedInstance);
        } else {
          // 4. This replica is not found in either the ideal assignment or the current assignment
          // with a valid allocation. This implies that the replica was newly added but was never
          // assigned in reality or was added so recently that it hasn't shown up in the ideal
          // assignment (because it's calculation takes longer and is asynchronously calculated).
          // In that case, we add it to toBeAssignedReplicas so that it will get assigned as a
          // result of partialRebalance.
          toBeAssignedReplicas.add(replica);
        }
      }
    }
    return toBeAssignedReplicas;
  }

  /**
   * Find the minimum set of replicas that need to be reassigned according to the cluster change.
   * A replica needs to be reassigned if one of the following condition is true:
   * 1. Cluster topology (the cluster config / any instance config) has been updated.
   * 2. The resource config has been updated.
   * 3. If the current assignment does not contain the partition's valid assignment.
   *
   * @param replicaMap        A map contains all the replicas grouped by resource name.
   * @param activeInstances   All the instances that are live and enabled according to the delay rebalance configuration.
   * @param liveInstances     All the instances that are live.
   * @param clusterChanges    A map that contains all the important metadata updates that happened after the previous rebalance.
   * @param currentAssignment The current replica assignment.
   * @param allocatedReplicas Return the allocated replicas grouped by the target instance name.
   * @return The replicas that need to be reassigned.
   */
  private static Set<AssignableReplica> findToBeAssignedReplicasByClusterChanges(
      Map<String, Set<AssignableReplica>> replicaMap, Set<String> activeInstances,
      Set<String> liveInstances, Map<HelixConstants.ChangeType, Set<String>> clusterChanges,
      Map<String, ResourceAssignment> currentAssignment,
      Map<String, Set<AssignableReplica>> allocatedReplicas) {
    Set<AssignableReplica> toBeAssignedReplicas = new HashSet<>();

    // A newly connected node = A new LiveInstance znode (or session Id updated) & the
    // corresponding instance is live.
    // TODO: The assumption here is that if the LiveInstance znode is created or it's session Id is
    // TODO: updated, we need to call algorithm for moving some partitions to this new node.
    // TODO: However, if the liveInstance znode is changed because of some other reason, it will be
    // TODO: treated as a newly connected nodes. We need to find a better way to identify which one
    // TODO: is the real newly connected nodes.
    Set<String> newlyConnectedNodes = clusterChanges
        .getOrDefault(HelixConstants.ChangeType.LIVE_INSTANCE, Collections.emptySet());
    newlyConnectedNodes.retainAll(liveInstances);
    if (clusterChanges.containsKey(HelixConstants.ChangeType.CLUSTER_CONFIG)
        || clusterChanges.containsKey(HelixConstants.ChangeType.INSTANCE_CONFIG)
        || !newlyConnectedNodes.isEmpty()) {
      // 1. If the cluster topology has been modified, need to reassign all replicas.
      // 2. If any node was newly connected, need to rebalance all replicas for the evenness of
      // distribution.
      toBeAssignedReplicas
          .addAll(replicaMap.values().stream().flatMap(Set::stream).collect(Collectors.toSet()));
    } else {
      // check each resource to identify the allocated replicas and to-be-assigned replicas.
      for (Map.Entry<String, Set<AssignableReplica>> replicaMapEntry : replicaMap.entrySet()) {
        String resourceName = replicaMapEntry.getKey();
        Set<AssignableReplica> replicas = replicaMapEntry.getValue();
        // 1. if the resource config/idealstate is changed, need to reassign.
        // 2. if the resource does not appear in the current assignment, need to reassign.
        if (clusterChanges
            .getOrDefault(HelixConstants.ChangeType.RESOURCE_CONFIG, Collections.emptySet())
            .contains(resourceName) || clusterChanges
            .getOrDefault(HelixConstants.ChangeType.IDEAL_STATE, Collections.emptySet())
            .contains(resourceName) || !currentAssignment.containsKey(resourceName)) {
          toBeAssignedReplicas.addAll(replicas);
          // go to check next resource
        } else {
          // check for every replica assignment to identify if the related replicas need to be reassigned.
          // <partition, <state, instances list>>
          Map<String, Map<String, Set<String>>> stateMap =
              getValidStateInstanceMap(currentAssignment.get(resourceName), activeInstances);
          for (AssignableReplica replica : replicas) {
            // Find any ACTIVE instance allocation that has the same state with the replica
            Set<String> validInstances =
                stateMap.getOrDefault(replica.getPartitionName(), Collections.emptyMap())
                    .getOrDefault(replica.getReplicaState(), Collections.emptySet());
            if (validInstances.isEmpty()) {
              // 3. if no such an instance in the current assignment, need to reassign the replica
              toBeAssignedReplicas.add(replica);
            } else {
              Iterator<String> iter = validInstances.iterator();
              // Remove the instance from the current allocation record after processing so that it
              // won't be double-processed as we loop through all replicas
              String logicalId = iter.next();
              iter.remove();
              // the current assignment for this replica is valid,
              // add to the allocated replica list.
              allocatedReplicas.computeIfAbsent(logicalId, key -> new HashSet<>()).add(replica);
            }
          }
        }
      }
    }
    return toBeAssignedReplicas;
  }

  /**
   * Find replicas that were assigned to non-active nodes in the current assignment.
   *
   * @param replicaMap        A map contains all the replicas grouped by resource name.
   * @param activeInstances   All the instances that are live and enabled according to the delay rebalance configuration.
   * @param currentAssignment The current assignment that was generated in the previous rebalance.
   * @param allocatedReplicas A map of <Instance -> replicas> to return the allocated replicas grouped by the target instance name.
   * @return The replicas that need to be reassigned.
   */
  private static Set<AssignableReplica> findToBeAssignedReplicasOnDownInstances(
      Map<String, Set<AssignableReplica>> replicaMap, Set<String> activeInstances,
      Map<String, ResourceAssignment> currentAssignment,
      Map<String, Set<AssignableReplica>> allocatedReplicas) {
    // For any replica that are assigned to non-active instances (down instances), add them.
    Set<AssignableReplica> toBeAssignedReplicas = new HashSet<>();
    for (String resourceName : replicaMap.keySet()) {
      Map<String, Map<String, Set<String>>> stateInstanceMap =
          getStateInstanceMap(currentAssignment.get(resourceName));
      for (AssignableReplica replica : replicaMap.get(resourceName)) {
        String partitionName = replica.getPartitionName();
        String replicaState = replica.getReplicaState();
        Set<String> currentAllocations =
            stateInstanceMap.getOrDefault(partitionName, Collections.emptyMap())
                .getOrDefault(replicaState, Collections.emptySet());
        if (!currentAllocations.isEmpty()) {
          String allocatedInstance = currentAllocations.iterator().next();
          if (activeInstances.contains(allocatedInstance)) {
            // The allocation is on a healthy instance; keep it.
            allocatedReplicas.computeIfAbsent(allocatedInstance, key -> new HashSet<>())
                .add(replica);
          } else {
            // The allocation is on a downed instance; the replica must be reassigned.
            toBeAssignedReplicas.add(replica);
          }
          currentAllocations.remove(allocatedInstance);
        }
      }
    }
    return toBeAssignedReplicas;
  }

  /**
   * Filter to remove all invalid allocations that are not on the active instances.
   * @param assignment      The assignment to derive the state-instance mapping from; may be null.
   * @param activeInstances The instances considered valid allocation targets.
   * @return A map of <partition, <state, instances set>> contains the valid state to instance map.
   */
  private static Map<String, Map<String, Set<String>>> getValidStateInstanceMap(
      ResourceAssignment assignment, Set<String> activeInstances) {
    Map<String, Map<String, Set<String>>> stateInstanceMap = getStateInstanceMap(assignment);
    stateInstanceMap.values().forEach(stateMap -> stateMap.values()
        .forEach(instanceSet -> instanceSet.retainAll(activeInstances)));
    return stateInstanceMap;
  }

  /**
   * Invert an assignment into a <partition, <state, instances set>> lookup map.
   * @param assignment The assignment to invert; a null assignment yields an empty map.
   * @return The state-to-instances mapping per partition.
   */
  public static Map<String, Map<String, Set<String>>> getStateInstanceMap(
      ResourceAssignment assignment) {
    if (assignment == null) {
      return Collections.emptyMap();
    }
    return assignment.getMappedPartitions().stream()
        .collect(Collectors.toMap(Partition::getPartitionName, partition -> {
          Map<String, Set<String>> stateInstanceMap = new HashMap<>();
          assignment.getReplicaMap(partition)
              .forEach((key1, value) -> stateInstanceMap
                  .computeIfAbsent(value, key -> new HashSet<>()).add(key1));
          return stateInstanceMap;
        }));
  }

  /**
   * Get all the nodes that can be assigned replicas based on the configurations.
   *
   * @param clusterConfig     The cluster configuration.
   * @param instanceConfigMap A map of all the instance configuration.
   *                          If any active instance has no configuration, it will be ignored.
   * @param activeInstances   All the instances that are online and enabled.
   * @return A map of assignable node set, <InstanceName, node set>.
   */
  private static Set<AssignableNode> getAllAssignableNodes(ClusterConfig clusterConfig,
      Map<String, InstanceConfig> instanceConfigMap, Set<String> activeInstances) {
    ClusterTopologyConfig clusterTopologyConfig =
        ClusterTopologyConfig.createFromClusterConfig(clusterConfig);
    return activeInstances.parallelStream()
        .filter(instanceConfigMap::containsKey).map(
            instanceName -> new AssignableNode(clusterConfig, clusterTopologyConfig,
                instanceConfigMap.get(instanceName), instanceName)).collect(Collectors.toSet());
  }

  /**
   * Get all the replicas that need to be reallocated from the cluster data cache.
   *
   * @param dataProvider    The cluster status cache that contains the current cluster status.
   * @param resourceMap     All the valid resources that are managed by the rebalancer.
   * @param assignableNodes All the active assignable nodes.
   * @return A map of assignable replica set, <ResourceName, replica set>.
   */
  private static Map<String, Set<AssignableReplica>> getAllAssignableReplicas(
      ResourceControllerDataProvider dataProvider, Map<String, Resource> resourceMap,
      Set<AssignableNode> assignableNodes) {
    ClusterConfig clusterConfig = dataProvider.getClusterConfig();
    int activeFaultZoneCount = assignableNodes.stream().map(AssignableNode::getFaultZone)
        .collect(Collectors.toSet()).size();
    return resourceMap.keySet().parallelStream().map(resourceName -> {
      ResourceConfig resourceConfig = dataProvider.getResourceConfig(resourceName);
      if (resourceConfig == null) {
        resourceConfig = new ResourceConfig(resourceName);
      }
      IdealState is = dataProvider.getIdealState(resourceName);
      if (is == null) {
        throw new HelixException(
            "Cannot find the resource ideal state for resource: " + resourceName);
      }
      String defName = is.getStateModelDefRef();
      StateModelDefinition def = dataProvider.getStateModelDef(defName);
      if (def == null) {
        throw new IllegalArgumentException(String
            .format("Cannot find state model definition %s for resource %s.", defName,
                resourceName));
      }
      Map<String, Integer> stateCountMap =
          def.getStateCountMap(activeFaultZoneCount, is.getReplicaCount(assignableNodes.size()));
      ResourceConfig mergedResourceConfig =
          ResourceConfig.mergeIdealStateWithResourceConfig(resourceConfig, is);
      Set<AssignableReplica> replicas = new HashSet<>();
      for (String partition : is.getPartitionSet()) {
        for (Map.Entry<String, Integer> entry : stateCountMap.entrySet()) {
          String state = entry.getKey();
          for (int i = 0; i < entry.getValue(); i++) {
            replicas.add(new AssignableReplica(clusterConfig, mergedResourceConfig, partition,
                state, def.getStatePriorityMap().get(state)));
          }
        }
      }
      // Use AbstractMap.SimpleEntry directly instead of referencing the inherited nested type
      // through the HashMap subclass.
      return new AbstractMap.SimpleEntry<>(resourceName, replicas);
    }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
  }

  /**
   * @return A map containing the assignments for each fault zone. <fault zone, <resource, set of partitions>>
   */
  private static Map<String, Map<String, Set<String>>> mapAssignmentToFaultZone(
      Set<AssignableNode> assignableNodes) {
    Map<String, Map<String, Set<String>>> faultZoneAssignmentMap = new HashMap<>();
    assignableNodes.forEach(node -> {
      for (Map.Entry<String, Set<String>> resourceMap : node.getAssignedPartitionsMap()
          .entrySet()) {
        faultZoneAssignmentMap.computeIfAbsent(node.getFaultZone(), k -> new HashMap<>())
            .computeIfAbsent(resourceMap.getKey(), k -> new HashSet<>())
            .addAll(resourceMap.getValue());
      }
    });
    return faultZoneAssignmentMap;
  }
}
googleapis/google-cloud-java
36,928
java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/AlertProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/v3/alert.proto // Protobuf Java Version: 3.25.8 package com.google.monitoring.v3; public final class AlertProto { private AlertProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Documentation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Documentation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Documentation_Link_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Documentation_Link_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_Trigger_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_Trigger_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_ForecastOptions_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_ForecastOptions_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricAbsence_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricAbsence_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_LabelExtractorsEntry_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_LabelExtractorsEntry_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_MonitoringQueryLanguageCondition_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_MonitoringQueryLanguageCondition_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_LabelsEntry_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_LabelsEntry_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Minutes_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Minutes_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Hourly_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Hourly_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Daily_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Daily_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_RowCountTest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_RowCountTest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_BooleanTest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_BooleanTest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationRateLimit_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationRateLimit_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationChannelStrategy_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationChannelStrategy_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_AlertPolicy_UserLabelsEntry_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_AlertPolicy_UserLabelsEntry_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + " google/monitoring/v3/alert.proto\022\024goog" + "le.monitoring.v3\032\037google/api/field_behav" + "ior.proto\032\031google/api/resource.proto\032!go" + "ogle/monitoring/v3/common.proto\032*google/" + "monitoring/v3/mutation_record.proto\032\036goo" + "gle/protobuf/duration.proto\032\036google/prot" + "obuf/wrappers.proto\032\027google/rpc/status.proto\032\033google/type/timeofday.proto\"\225-\n" + "\013AlertPolicy\022\021\n" + "\004name\030\001 \001(\tB\003\340A\010\022\024\n" + "\014display_name\030\002 \001(\t\022F\n\r" + "documentation\030\r" + " \001(\0132/.google.monitoring.v3.AlertPolicy.Documentation\022F\n" + "\013user_labels\030\020" + " \003(\01321.google.monitoring.v3.AlertPolicy.UserLabelsEntry\022?\n\n" + "conditions\030\014 \003(\0132+.google.monitoring.v3.AlertPolicy.Condition\022I\n" + "\010combiner\030\006 \001(\01627." 
+ "google.monitoring.v3.AlertPolicy.ConditionCombinerType\022+\n" + "\007enabled\030\021 \001(\0132\032.google.protobuf.BoolValue\022$\n" + "\010validity\030\022 \001(\0132\022.google.rpc.Status\022\035\n" + "\025notification_channels\030\016 \003(\t\022=\n" + "\017creation_record\030\n" + " \001(\0132$.google.monitoring.v3.MutationRecord\022=\n" + "\017mutation_record\030\013 \001(\0132$.google.monitoring.v3.MutationRecord\022G\n" + "\016alert_strategy\030\025 \001(\0132/" + ".google.monitoring.v3.AlertPolicy.AlertStrategy\022A\n" + "\010severity\030\026" + " \001(\0162*.google.monitoring.v3.AlertPolicy.SeverityB\003\340A\001\032\276\001\n\r" + "Documentation\022\017\n" + "\007content\030\001 \001(\t\022\021\n" + "\tmime_type\030\002 \001(\t\022\024\n" + "\007subject\030\003 \001(\tB\003\340A\001\022H\n" + "\005links\030\004" + " \003(\01324.google.monitoring.v3.AlertPolicy.Documentation.LinkB\003\340A\001\032)\n" + "\004Link\022\024\n" + "\014display_name\030\001 \001(\t\022\013\n" + "\003url\030\002 \001(\t\032\306\035\n" + "\tCondition\022\014\n" + "\004name\030\014 \001(\t\022\024\n" + "\014display_name\030\006 \001(\t\022Z\n" + "\023condition_threshold\030\001 \001(\0132;.google.moni" + "toring.v3.AlertPolicy.Condition.MetricThresholdH\000\022U\n" + "\020condition_absent\030\002 \001(\01329.go" + "ogle.monitoring.v3.AlertPolicy.Condition.MetricAbsenceH\000\022U\n" + "\025condition_matched_log\030\024" + " \001(\01324.google.monitoring.v3.AlertPolicy.Condition.LogMatchH\000\022{\n" + "#condition_monitoring_query_language\030\023 \001(\0132L.google.mo" + "nitoring.v3.AlertPolicy.Condition.MonitoringQueryLanguageConditionH\000\022{\n" + "#condition_prometheus_query_language\030\025 \001(\0132L.goog" + "le.monitoring.v3.AlertPolicy.Condition.PrometheusQueryLanguageConditionH\000\022Q\n\r" + "condition_sql\030\026" + " \001(\01328.google.monitoring.v3.AlertPolicy.Condition.SqlConditionH\000\0325\n" + "\007Trigger\022\017\n" + "\005count\030\001 
\001(\005H\000\022\021\n" + "\007percent\030\002 \001(\001H\000B\006\n" + "\004type\032\236\005\n" + "\017MetricThreshold\022\023\n" + "\006filter\030\002 \001(\tB\003\340A\002\0227\n" + "\014aggregations\030\010 \003(\0132!.google.monitoring.v3.Aggregation\022\032\n" + "\022denominator_filter\030\t \001(\t\022C\n" + "\030denominator_aggregations\030\n" + " \003(\0132!.google.monitoring.v3.Aggregation\022e\n" + "\020forecast_options\030\014 \001(\0132K.googl" + "e.monitoring.v3.AlertPolicy.Condition.MetricThreshold.ForecastOptions\0228\n\n" + "comparison\030\004 \001(\0162$.google.monitoring.v3.ComparisonType\022\027\n" + "\017threshold_value\030\005 \001(\001\022+\n" + "\010duration\030\006 \001(\0132\031.google.protobuf.Duration\022D\n" + "\007trigger\030\007" + " \001(\01323.google.monitoring.v3.AlertPolicy.Condition.Trigger\022b\n" + "\027evaluation_missing_data\030\013 \001(\0162A.google.monitoring" + ".v3.AlertPolicy.Condition.EvaluationMissingData\032K\n" + "\017ForecastOptions\0228\n" + "\020forecast_horizon\030\001" + " \001(\0132\031.google.protobuf.DurationB\003\340A\002\032\320\001\n\r" + "MetricAbsence\022\023\n" + "\006filter\030\001 \001(\tB\003\340A\002\0227\n" + "\014aggregations\030\005 \003(\0132!.google.monitoring.v3.Aggregation\022+\n" + "\010duration\030\002 \001(\0132\031.google.protobuf.Duration\022D\n" + "\007trigger\030\003 \001" + "(\01323.google.monitoring.v3.AlertPolicy.Condition.Trigger\032\274\001\n" + "\010LogMatch\022\023\n" + "\006filter\030\001 \001(\tB\003\340A\002\022c\n" + "\020label_extractors\030\002 \003(\0132I.go" + "ogle.monitoring.v3.AlertPolicy.Condition.LogMatch.LabelExtractorsEntry\0326\n" + "\024LabelExtractorsEntry\022\013\n" + "\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001\032\210\002\n" + " MonitoringQueryLanguageCondition\022\r\n" + "\005query\030\001 \001(\t\022+\n" + "\010duration\030\002 \001(\0132\031.google.protobuf.Duration\022D\n" + "\007trigger\030\003 \001(\013" + 
"23.google.monitoring.v3.AlertPolicy.Condition.Trigger\022b\n" + "\027evaluation_missing_data\030\004" + " \001(\0162A.google.monitoring.v3.AlertPolicy.Condition.EvaluationMissingData\032\235\003\n" + " PrometheusQueryLanguageCondition\022\022\n" + "\005query\030\001 \001(\tB\003\340A\002\0220\n" + "\010duration\030\002 \001(\0132\031.google.protobuf.DurationB\003\340A\001\022;\n" + "\023evaluation_interval\030\003" + " \001(\0132\031.google.protobuf.DurationB\003\340A\001\022m\n" + "\006labels\030\004 \003(\0132X.google.monitoring.v3" + ".AlertPolicy.Condition.PrometheusQueryLanguageCondition.LabelsEntryB\003\340A\001\022\027\n\n" + "rule_group\030\005 \001(\tB\003\340A\001\022\027\n\n" + "alert_rule\030\006 \001(\tB\003\340A\001\022&\n" + "\031disable_metric_validation\030\007 \001(\010B\003\340A\001\032-\n" + "\013LabelsEntry\022\013\n" + "\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001\032\321\006\n" + "\014SqlCondition\022\022\n" + "\005query\030\001 \001(\tB\003\340A\002\022S\n" + "\007minutes\030\002 \001(\0132@.google.monitor" + "ing.v3.AlertPolicy.Condition.SqlCondition.MinutesH\000\022Q\n" + "\006hourly\030\003 \001(\0132?.google.mon" + "itoring.v3.AlertPolicy.Condition.SqlCondition.HourlyH\000\022O\n" + "\005daily\030\004 \001(\0132>.google.m" + "onitoring.v3.AlertPolicy.Condition.SqlCondition.DailyH\000\022_\n" + "\016row_count_test\030\005 \001(\0132" + "E.google.monitoring.v3.AlertPolicy.Condition.SqlCondition.RowCountTestH\001\022\\\n" + "\014boolean_test\030\006 \001(\0132D.google.monitoring.v3.Al" + "ertPolicy.Condition.SqlCondition.BooleanTestH\001\032#\n" + "\007Minutes\022\030\n" + "\013periodicity\030\001 \001(\005B\003\340A\002\032U\n" + "\006Hourly\022\030\n" + "\013periodicity\030\001 \001(\005B\003\340A\002\022\037\n\r" + "minute_offset\030\002 \001(\005B\003\340A\001H\000\210\001\001B\020\n" + "\016_minute_offset\032V\n" + "\005Daily\022\030\n" + "\013periodicity\030\001 \001(\005B\003\340A\002\0223\n" + 
"\016execution_time\030\002 \001(\0132\026.google.type.TimeOfDayB\003\340A\001\032e\n" + "\014RowCountTest\022=\n\n" + "comparison\030\001" + " \001(\0162$.google.monitoring.v3.ComparisonTypeB\003\340A\002\022\026\n" + "\tthreshold\030\002 \001(\003B\003\340A\002\032\"\n" + "\013BooleanTest\022\023\n" + "\006column\030\001 \001(\tB\003\340A\002B\n\n" + "\010scheduleB\n\n" + "\010evaluate\"\255\001\n" + "\025EvaluationMissingData\022\'\n" + "#EVALUATION_MISSING_DATA_UNSPECIFIED\020\000\022$\n" + " EVALUATION_MISSING_DATA_INACTIVE\020\001\022\"\n" + "\036EVALUATION_MISSING_DATA_ACTIVE\020\002\022!\n" + "\035EVALUATION_MISSING_DATA_NO_OP\020\003:\227\002\352A\223\002\n" + ".monitoring.googleapis.com/AlertPolicyCondition\022Fprojects/{project}/alertPo" + "licies/{alert_policy}/conditions/{condition}\022Porganizations/{organization}/alert" + "Policies/{alert_policy}/conditions/{condition}\022Dfolders/{folder}/alertPolicies/{" + "alert_policy}/conditions/{condition}\022\001*B\013\n" + "\tcondition\032\214\005\n\r" + "AlertStrategy\022f\n" + "\027notification_rate_limit\030\001 \001(\0132E.google.monitor" + "ing.v3.AlertPolicy.AlertStrategy.NotificationRateLimit\022`\n" + "\024notification_prompts\030\002" + " \003(\0162B.google.monitoring.v3.AlertPolicy.AlertStrategy.NotificationPrompt\022-\n\n" + "auto_close\030\003 \001(\0132\031.google.protobuf.Duration\022r\n" + "\035notification_channel_strategy\030\004 \003(\0132K" + ".google.monitoring.v3.AlertPolicy.AlertStrategy.NotificationChannelStrategy\032B\n" + "\025NotificationRateLimit\022)\n" + "\006period\030\001 \001(\0132\031.google.protobuf.Duration\032w\n" + "\033NotificationChannelStrategy\022\"\n" + "\032notification_channel_names\030\001 \003(\t\0224\n" + "\021renotify_interval\030\002 \001(\0132\031.google.protobuf.Duration\"Q\n" + "\022NotificationPrompt\022#\n" + "\037NOTIFICATION_PROMPT_UNSPECIFIED\020\000\022\n\n" + "\006OPENED\020\001\022\n\n" + "\006CLOSED\020\003\0321\n" + "\017UserLabelsEntry\022\013\n" 
+ "\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001\"a\n" + "\025ConditionCombinerType\022\027\n" + "\023COMBINE_UNSPECIFIED\020\000\022\007\n" + "\003AND\020\001\022\006\n" + "\002OR\020\002\022\036\n" + "\032AND_WITH_MATCHING_RESOURCE\020\003\"J\n" + "\010Severity\022\030\n" + "\024SEVERITY_UNSPECIFIED\020\000\022\014\n" + "\010CRITICAL\020\001\022\t\n" + "\005ERROR\020\002\022\013\n" + "\007WARNING\020\003:\311\001\352A\305\001\n" + "%monitoring.googleapis.com/AlertPolicy\022/projects/{project}/" + "alertPolicies/{alert_policy}\0229organizations/{organization}/alertPolicies/{alert_" + "policy}\022-folders/{folder}/alertPolicies/{alert_policy}\022\001*B\305\001\n" + "\030com.google.monitoring.v3B\n" + "AlertProtoP\001ZAcloud.google.com/go/monitoring/apiv3/v2/monitoringpb;monit" + "oringpb\252\002\032Google.Cloud.Monitoring.V3\312\002\032G" + "oogle\\Cloud\\Monitoring\\V3\352\002\035Google::Cloud::Monitoring::V3b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.monitoring.v3.CommonProto.getDescriptor(), com.google.monitoring.v3.MutationRecordProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.WrappersProto.getDescriptor(), com.google.rpc.StatusProto.getDescriptor(), com.google.type.TimeOfDayProto.getDescriptor(), }); internal_static_google_monitoring_v3_AlertPolicy_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_monitoring_v3_AlertPolicy_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_descriptor, new java.lang.String[] { "Name", "DisplayName", "Documentation", "UserLabels", "Conditions", "Combiner", "Enabled", "Validity", "NotificationChannels", 
"CreationRecord", "MutationRecord", "AlertStrategy", "Severity", }); internal_static_google_monitoring_v3_AlertPolicy_Documentation_descriptor = internal_static_google_monitoring_v3_AlertPolicy_descriptor.getNestedTypes().get(0); internal_static_google_monitoring_v3_AlertPolicy_Documentation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Documentation_descriptor, new java.lang.String[] { "Content", "MimeType", "Subject", "Links", }); internal_static_google_monitoring_v3_AlertPolicy_Documentation_Link_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Documentation_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_Documentation_Link_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Documentation_Link_descriptor, new java.lang.String[] { "DisplayName", "Url", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor = internal_static_google_monitoring_v3_AlertPolicy_descriptor.getNestedTypes().get(1); internal_static_google_monitoring_v3_AlertPolicy_Condition_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor, new java.lang.String[] { "Name", "DisplayName", "ConditionThreshold", "ConditionAbsent", "ConditionMatchedLog", "ConditionMonitoringQueryLanguage", "ConditionPrometheusQueryLanguage", "ConditionSql", "Condition", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_Trigger_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_Condition_Trigger_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_Trigger_descriptor, new 
java.lang.String[] { "Count", "Percent", "Type", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(1); internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_descriptor, new java.lang.String[] { "Filter", "Aggregations", "DenominatorFilter", "DenominatorAggregations", "ForecastOptions", "Comparison", "ThresholdValue", "Duration", "Trigger", "EvaluationMissingData", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_ForecastOptions_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_ForecastOptions_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricThreshold_ForecastOptions_descriptor, new java.lang.String[] { "ForecastHorizon", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricAbsence_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(2); internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricAbsence_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_MetricAbsence_descriptor, new java.lang.String[] { "Filter", "Aggregations", "Duration", "Trigger", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(3); internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_fieldAccessorTable = 
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_descriptor, new java.lang.String[] { "Filter", "LabelExtractors", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_LabelExtractorsEntry_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_LabelExtractorsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_LogMatch_LabelExtractorsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_MonitoringQueryLanguageCondition_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(4); internal_static_google_monitoring_v3_AlertPolicy_Condition_MonitoringQueryLanguageCondition_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_MonitoringQueryLanguageCondition_descriptor, new java.lang.String[] { "Query", "Duration", "Trigger", "EvaluationMissingData", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(5); internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_descriptor, new java.lang.String[] { "Query", "Duration", "EvaluationInterval", "Labels", "RuleGroup", "AlertRule", "DisableMetricValidation", }); 
internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_LabelsEntry_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_LabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_PrometheusQueryLanguageCondition_LabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_descriptor .getNestedTypes() .get(6); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor, new java.lang.String[] { "Query", "Minutes", "Hourly", "Daily", "RowCountTest", "BooleanTest", "Schedule", "Evaluate", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Minutes_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Minutes_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Minutes_descriptor, new java.lang.String[] { "Periodicity", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Hourly_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor .getNestedTypes() .get(1); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Hourly_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Hourly_descriptor, new java.lang.String[] { "Periodicity", "MinuteOffset", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Daily_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor .getNestedTypes() .get(2); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Daily_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_Daily_descriptor, new java.lang.String[] { "Periodicity", "ExecutionTime", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_RowCountTest_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor .getNestedTypes() .get(3); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_RowCountTest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_RowCountTest_descriptor, new java.lang.String[] { "Comparison", "Threshold", }); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_BooleanTest_descriptor = internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_descriptor .getNestedTypes() .get(4); internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_BooleanTest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_Condition_SqlCondition_BooleanTest_descriptor, new java.lang.String[] { "Column", }); internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_descriptor = internal_static_google_monitoring_v3_AlertPolicy_descriptor.getNestedTypes().get(2); 
internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_descriptor, new java.lang.String[] { "NotificationRateLimit", "NotificationPrompts", "AutoClose", "NotificationChannelStrategy", }); internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationRateLimit_descriptor = internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationRateLimit_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationRateLimit_descriptor, new java.lang.String[] { "Period", }); internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationChannelStrategy_descriptor = internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_descriptor .getNestedTypes() .get(1); internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationChannelStrategy_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_AlertStrategy_NotificationChannelStrategy_descriptor, new java.lang.String[] { "NotificationChannelNames", "RenotifyInterval", }); internal_static_google_monitoring_v3_AlertPolicy_UserLabelsEntry_descriptor = internal_static_google_monitoring_v3_AlertPolicy_descriptor.getNestedTypes().get(3); internal_static_google_monitoring_v3_AlertPolicy_UserLabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_AlertPolicy_UserLabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); 
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.ResourceProto.resource); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.monitoring.v3.CommonProto.getDescriptor(); com.google.monitoring.v3.MutationRecordProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.WrappersProto.getDescriptor(); com.google.rpc.StatusProto.getDescriptor(); com.google.type.TimeOfDayProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
googleapis/google-cloud-java
37,821
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/Content.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/content.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * The base structured datatype containing multi-part content of a message. * * A `Content` includes a `role` field designating the producer of the `Content` * and a `parts` field containing multi-part data that contains the content of * the message turn. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.Content} */ public final class Content extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.Content) ContentOrBuilder { private static final long serialVersionUID = 0L; // Use Content.newBuilder() to construct. 
private Content(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Content() { role_ = ""; parts_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Content(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ContentProto .internal_static_google_cloud_aiplatform_v1_Content_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ContentProto .internal_static_google_cloud_aiplatform_v1_Content_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.Content.class, com.google.cloud.aiplatform.v1.Content.Builder.class); } public static final int ROLE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object role_ = ""; /** * * * <pre> * Optional. The producer of the content. Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The role. */ @java.lang.Override public java.lang.String getRole() { java.lang.Object ref = role_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); role_ = s; return s; } } /** * * * <pre> * Optional. The producer of the content. Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for role. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRoleBytes() { java.lang.Object ref = role_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); role_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PARTS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1.Part> parts_; /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1.Part> getPartsList() { return parts_; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1.PartOrBuilder> getPartsOrBuilderList() { return parts_; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public int getPartsCount() { return parts_.size(); } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.Part getParts(int index) { return parts_.get(index); } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. 
Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.PartOrBuilder getPartsOrBuilder(int index) { return parts_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(role_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, role_); } for (int i = 0; i < parts_.size(); i++) { output.writeMessage(2, parts_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(role_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, role_); } for (int i = 0; i < parts_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, parts_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.Content)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.Content other = (com.google.cloud.aiplatform.v1.Content) obj; if (!getRole().equals(other.getRole())) return false; if (!getPartsList().equals(other.getPartsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ROLE_FIELD_NUMBER; hash = (53 * hash) + getRole().hashCode(); if (getPartsCount() > 0) { hash = (37 * hash) + PARTS_FIELD_NUMBER; hash = (53 * hash) + getPartsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.Content parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.Content parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.Content parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.Content parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.Content parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.Content parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.Content parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.Content parseFrom( java.io.InputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.Content parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.Content parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.Content parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.Content parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.Content prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The base structured datatype containing multi-part content of a message. 
* * A `Content` includes a `role` field designating the producer of the `Content` * and a `parts` field containing multi-part data that contains the content of * the message turn. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.Content} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.Content) com.google.cloud.aiplatform.v1.ContentOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.ContentProto .internal_static_google_cloud_aiplatform_v1_Content_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.ContentProto .internal_static_google_cloud_aiplatform_v1_Content_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.Content.class, com.google.cloud.aiplatform.v1.Content.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.Content.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; role_ = ""; if (partsBuilder_ == null) { parts_ = java.util.Collections.emptyList(); } else { parts_ = null; partsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.ContentProto .internal_static_google_cloud_aiplatform_v1_Content_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.Content getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.Content.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.Content build() { 
com.google.cloud.aiplatform.v1.Content result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.Content buildPartial() { com.google.cloud.aiplatform.v1.Content result = new com.google.cloud.aiplatform.v1.Content(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.aiplatform.v1.Content result) { if (partsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { parts_ = java.util.Collections.unmodifiableList(parts_); bitField0_ = (bitField0_ & ~0x00000002); } result.parts_ = parts_; } else { result.parts_ = partsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1.Content result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.role_ = role_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.aiplatform.v1.Content) { return mergeFrom((com.google.cloud.aiplatform.v1.Content) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.Content other) { if (other == com.google.cloud.aiplatform.v1.Content.getDefaultInstance()) return this; if (!other.getRole().isEmpty()) { role_ = other.role_; bitField0_ |= 0x00000001; onChanged(); } if (partsBuilder_ == null) { if (!other.parts_.isEmpty()) { if (parts_.isEmpty()) { parts_ = other.parts_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensurePartsIsMutable(); parts_.addAll(other.parts_); } onChanged(); } } else { if (!other.parts_.isEmpty()) { if (partsBuilder_.isEmpty()) { partsBuilder_.dispose(); partsBuilder_ = null; parts_ = other.parts_; bitField0_ = (bitField0_ & ~0x00000002); partsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getPartsFieldBuilder() : null; } else { partsBuilder_.addAllMessages(other.parts_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { role_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.cloud.aiplatform.v1.Part m = input.readMessage( com.google.cloud.aiplatform.v1.Part.parser(), extensionRegistry); if (partsBuilder_ == null) { ensurePartsIsMutable(); parts_.add(m); } else { partsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } 
// default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object role_ = ""; /** * * * <pre> * Optional. The producer of the content. Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The role. */ public java.lang.String getRole() { java.lang.Object ref = role_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); role_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The producer of the content. Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for role. */ public com.google.protobuf.ByteString getRoleBytes() { java.lang.Object ref = role_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); role_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The producer of the content. Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The role to set. * @return This builder for chaining. */ public Builder setRole(java.lang.String value) { if (value == null) { throw new NullPointerException(); } role_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. The producer of the content. 
Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRole() { role_ = getDefaultInstance().getRole(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Optional. The producer of the content. Must be either 'user' or 'model'. * * Useful to set for multi-turn conversations, otherwise can be left blank * or unset. * </pre> * * <code>string role = 1 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for role to set. * @return This builder for chaining. */ public Builder setRoleBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); role_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.cloud.aiplatform.v1.Part> parts_ = java.util.Collections.emptyList(); private void ensurePartsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { parts_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Part>(parts_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Part, com.google.cloud.aiplatform.v1.Part.Builder, com.google.cloud.aiplatform.v1.PartOrBuilder> partsBuilder_; /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.aiplatform.v1.Part> getPartsList() { if (partsBuilder_ == null) { return java.util.Collections.unmodifiableList(parts_); } else { return partsBuilder_.getMessageList(); } } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. 
Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getPartsCount() { if (partsBuilder_ == null) { return parts_.size(); } else { return partsBuilder_.getCount(); } } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.Part getParts(int index) { if (partsBuilder_ == null) { return parts_.get(index); } else { return partsBuilder_.getMessage(index); } } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setParts(int index, com.google.cloud.aiplatform.v1.Part value) { if (partsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePartsIsMutable(); parts_.set(index, value); onChanged(); } else { partsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setParts( int index, com.google.cloud.aiplatform.v1.Part.Builder builderForValue) { if (partsBuilder_ == null) { ensurePartsIsMutable(); parts_.set(index, builderForValue.build()); onChanged(); } else { partsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. 
* </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addParts(com.google.cloud.aiplatform.v1.Part value) { if (partsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePartsIsMutable(); parts_.add(value); onChanged(); } else { partsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addParts(int index, com.google.cloud.aiplatform.v1.Part value) { if (partsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePartsIsMutable(); parts_.add(index, value); onChanged(); } else { partsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addParts(com.google.cloud.aiplatform.v1.Part.Builder builderForValue) { if (partsBuilder_ == null) { ensurePartsIsMutable(); parts_.add(builderForValue.build()); onChanged(); } else { partsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. 
* </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addParts( int index, com.google.cloud.aiplatform.v1.Part.Builder builderForValue) { if (partsBuilder_ == null) { ensurePartsIsMutable(); parts_.add(index, builderForValue.build()); onChanged(); } else { partsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllParts( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Part> values) { if (partsBuilder_ == null) { ensurePartsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, parts_); onChanged(); } else { partsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearParts() { if (partsBuilder_ == null) { parts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { partsBuilder_.clear(); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removeParts(int index) { if (partsBuilder_ == null) { ensurePartsIsMutable(); parts_.remove(index); onChanged(); } else { partsBuilder_.remove(index); } return this; } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. 
* </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.Part.Builder getPartsBuilder(int index) { return getPartsFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.PartOrBuilder getPartsOrBuilder(int index) { if (partsBuilder_ == null) { return parts_.get(index); } else { return partsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1.PartOrBuilder> getPartsOrBuilderList() { if (partsBuilder_ != null) { return partsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(parts_); } } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.Part.Builder addPartsBuilder() { return getPartsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1.Part.getDefaultInstance()); } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. 
* </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.Part.Builder addPartsBuilder(int index) { return getPartsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1.Part.getDefaultInstance()); } /** * * * <pre> * Required. Ordered `Parts` that constitute a single message. Parts may have * different IANA MIME types. * </pre> * * <code> * repeated .google.cloud.aiplatform.v1.Part parts = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.aiplatform.v1.Part.Builder> getPartsBuilderList() { return getPartsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Part, com.google.cloud.aiplatform.v1.Part.Builder, com.google.cloud.aiplatform.v1.PartOrBuilder> getPartsFieldBuilder() { if (partsBuilder_ == null) { partsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.Part, com.google.cloud.aiplatform.v1.Part.Builder, com.google.cloud.aiplatform.v1.PartOrBuilder>( parts_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); parts_ = null; } return partsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.Content) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.Content) private static final com.google.cloud.aiplatform.v1.Content DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.Content(); } public static com.google.cloud.aiplatform.v1.Content getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Content> PARSER = new com.google.protobuf.AbstractParser<Content>() { @java.lang.Override public Content parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Content> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Content> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.Content getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
38,064
java-dialogflow/grpc-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/KnowledgeBasesGrpc.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.dialogflow.v2;

import static io.grpc.MethodDescriptor.generateFullMethodName;

// NOTE(review): this file is emitted by the gRPC proto compiler from
// google/cloud/dialogflow/v2/knowledge_base.proto. Do not hand-edit business
// logic here — changes will be overwritten on the next code generation.
/**
 *
 *
 * <pre>
 * Service for managing
 * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/cloud/dialogflow/v2/knowledge_base.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class KnowledgeBasesGrpc {

  // Non-instantiable: this class is a static holder for method descriptors,
  // stub factories, and the service descriptor.
  private KnowledgeBasesGrpc() {}

  public static final java.lang.String SERVICE_NAME = "google.cloud.dialogflow.v2.KnowledgeBases";

  // Static method descriptors that strictly reflect the proto.
  //
  // Each descriptor below is created lazily via double-checked locking: the
  // field is volatile, so the first read outside the synchronized block is
  // safe, and construction happens at most once per process.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest,
          com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
      getListKnowledgeBasesMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListKnowledgeBases",
      requestType = com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest.class,
      responseType = com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest,
          com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
      getListKnowledgeBasesMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest,
            com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
        getListKnowledgeBasesMethod;
    if ((getListKnowledgeBasesMethod = KnowledgeBasesGrpc.getListKnowledgeBasesMethod) == null) {
      synchronized (KnowledgeBasesGrpc.class) {
        if ((getListKnowledgeBasesMethod = KnowledgeBasesGrpc.getListKnowledgeBasesMethod)
            == null) {
          KnowledgeBasesGrpc.getListKnowledgeBasesMethod =
              getListKnowledgeBasesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest,
                          com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListKnowledgeBases"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new KnowledgeBasesMethodDescriptorSupplier("ListKnowledgeBases"))
                      .build();
        }
      }
    }
    return getListKnowledgeBasesMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest,
          com.google.cloud.dialogflow.v2.KnowledgeBase>
      getGetKnowledgeBaseMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetKnowledgeBase",
      requestType = com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest.class,
      responseType = com.google.cloud.dialogflow.v2.KnowledgeBase.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest,
          com.google.cloud.dialogflow.v2.KnowledgeBase>
      getGetKnowledgeBaseMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest,
            com.google.cloud.dialogflow.v2.KnowledgeBase>
        getGetKnowledgeBaseMethod;
    if ((getGetKnowledgeBaseMethod = KnowledgeBasesGrpc.getGetKnowledgeBaseMethod) == null) {
      synchronized (KnowledgeBasesGrpc.class) {
        if ((getGetKnowledgeBaseMethod = KnowledgeBasesGrpc.getGetKnowledgeBaseMethod) == null) {
          KnowledgeBasesGrpc.getGetKnowledgeBaseMethod =
              getGetKnowledgeBaseMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest,
                          com.google.cloud.dialogflow.v2.KnowledgeBase>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetKnowledgeBase"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.KnowledgeBase.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new KnowledgeBasesMethodDescriptorSupplier("GetKnowledgeBase"))
                      .build();
        }
      }
    }
    return getGetKnowledgeBaseMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest,
          com.google.cloud.dialogflow.v2.KnowledgeBase>
      getCreateKnowledgeBaseMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateKnowledgeBase",
      requestType = com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest.class,
      responseType = com.google.cloud.dialogflow.v2.KnowledgeBase.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest,
          com.google.cloud.dialogflow.v2.KnowledgeBase>
      getCreateKnowledgeBaseMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest,
            com.google.cloud.dialogflow.v2.KnowledgeBase>
        getCreateKnowledgeBaseMethod;
    if ((getCreateKnowledgeBaseMethod = KnowledgeBasesGrpc.getCreateKnowledgeBaseMethod) == null) {
      synchronized (KnowledgeBasesGrpc.class) {
        if ((getCreateKnowledgeBaseMethod = KnowledgeBasesGrpc.getCreateKnowledgeBaseMethod)
            == null) {
          KnowledgeBasesGrpc.getCreateKnowledgeBaseMethod =
              getCreateKnowledgeBaseMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest,
                          com.google.cloud.dialogflow.v2.KnowledgeBase>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "CreateKnowledgeBase"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.KnowledgeBase.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new KnowledgeBasesMethodDescriptorSupplier("CreateKnowledgeBase"))
                      .build();
        }
      }
    }
    return getCreateKnowledgeBaseMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest, com.google.protobuf.Empty>
      getDeleteKnowledgeBaseMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteKnowledgeBase",
      requestType = com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest, com.google.protobuf.Empty>
      getDeleteKnowledgeBaseMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest, com.google.protobuf.Empty>
        getDeleteKnowledgeBaseMethod;
    if ((getDeleteKnowledgeBaseMethod = KnowledgeBasesGrpc.getDeleteKnowledgeBaseMethod) == null) {
      synchronized (KnowledgeBasesGrpc.class) {
        if ((getDeleteKnowledgeBaseMethod = KnowledgeBasesGrpc.getDeleteKnowledgeBaseMethod)
            == null) {
          KnowledgeBasesGrpc.getDeleteKnowledgeBaseMethod =
              getDeleteKnowledgeBaseMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest,
                          com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "DeleteKnowledgeBase"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new KnowledgeBasesMethodDescriptorSupplier("DeleteKnowledgeBase"))
                      .build();
        }
      }
    }
    return getDeleteKnowledgeBaseMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest,
          com.google.cloud.dialogflow.v2.KnowledgeBase>
      getUpdateKnowledgeBaseMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateKnowledgeBase",
      requestType = com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest.class,
      responseType = com.google.cloud.dialogflow.v2.KnowledgeBase.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest,
          com.google.cloud.dialogflow.v2.KnowledgeBase>
      getUpdateKnowledgeBaseMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest,
            com.google.cloud.dialogflow.v2.KnowledgeBase>
        getUpdateKnowledgeBaseMethod;
    if ((getUpdateKnowledgeBaseMethod = KnowledgeBasesGrpc.getUpdateKnowledgeBaseMethod) == null) {
      synchronized (KnowledgeBasesGrpc.class) {
        if ((getUpdateKnowledgeBaseMethod = KnowledgeBasesGrpc.getUpdateKnowledgeBaseMethod)
            == null) {
          KnowledgeBasesGrpc.getUpdateKnowledgeBaseMethod =
              getUpdateKnowledgeBaseMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest,
                          com.google.cloud.dialogflow.v2.KnowledgeBase>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "UpdateKnowledgeBase"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.v2.KnowledgeBase.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new KnowledgeBasesMethodDescriptorSupplier("UpdateKnowledgeBase"))
                      .build();
        }
      }
    }
    return getUpdateKnowledgeBaseMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static KnowledgeBasesStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesStub>() {
          @java.lang.Override
          public KnowledgeBasesStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new KnowledgeBasesStub(channel, callOptions);
          }
        };
    return KnowledgeBasesStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static KnowledgeBasesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesBlockingV2Stub>() {
          @java.lang.Override
          public KnowledgeBasesBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new KnowledgeBasesBlockingV2Stub(channel, callOptions);
          }
        };
    return KnowledgeBasesBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static KnowledgeBasesBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesBlockingStub>() {
          @java.lang.Override
          public KnowledgeBasesBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new KnowledgeBasesBlockingStub(channel, callOptions);
          }
        };
    return KnowledgeBasesBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static KnowledgeBasesFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<KnowledgeBasesFutureStub>() {
          @java.lang.Override
          public KnowledgeBasesFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new KnowledgeBasesFutureStub(channel, callOptions);
          }
        };
    return KnowledgeBasesFutureStub.newStub(factory, channel);
  }

  // Server-side contract: each default method responds UNIMPLEMENTED until a
  // service implementation overrides it.
  /**
   *
   *
   * <pre>
   * Service for managing
   * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Returns the list of all knowledge bases of the specified agent.
     * </pre>
     */
    default void listKnowledgeBases(
        com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListKnowledgeBasesMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified knowledge base.
     * </pre>
     */
    default void getKnowledgeBase(
        com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getGetKnowledgeBaseMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a knowledge base.
     * </pre>
     */
    default void createKnowledgeBase(
        com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateKnowledgeBaseMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes the specified knowledge base.
     * </pre>
     */
    default void deleteKnowledgeBase(
        com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteKnowledgeBaseMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified knowledge base.
     * </pre>
     */
    default void updateKnowledgeBase(
        com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateKnowledgeBaseMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service KnowledgeBases.
   *
   * <pre>
   * Service for managing
   * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
   * </pre>
   */
  public abstract static class KnowledgeBasesImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return KnowledgeBasesGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service KnowledgeBases.
   *
   * <pre>
   * Service for managing
   * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
   * </pre>
   */
  public static final class KnowledgeBasesStub
      extends io.grpc.stub.AbstractAsyncStub<KnowledgeBasesStub> {
    private KnowledgeBasesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KnowledgeBasesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KnowledgeBasesStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns the list of all knowledge bases of the specified agent.
     * </pre>
     */
    public void listKnowledgeBases(
        com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListKnowledgeBasesMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified knowledge base.
     * </pre>
     */
    public void getKnowledgeBase(
        com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetKnowledgeBaseMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a knowledge base.
     * </pre>
     */
    public void createKnowledgeBase(
        com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateKnowledgeBaseMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes the specified knowledge base.
     * </pre>
     */
    public void deleteKnowledgeBase(
        com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteKnowledgeBaseMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified knowledge base.
     * </pre>
     */
    public void updateKnowledgeBase(
        com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateKnowledgeBaseMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service KnowledgeBases.
   *
   * <pre>
   * Service for managing
   * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
   * </pre>
   */
  public static final class KnowledgeBasesBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<KnowledgeBasesBlockingV2Stub> {
    private KnowledgeBasesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KnowledgeBasesBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KnowledgeBasesBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns the list of all knowledge bases of the specified agent.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse listKnowledgeBases(
        com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListKnowledgeBasesMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified knowledge base.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.KnowledgeBase getKnowledgeBase(
        com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetKnowledgeBaseMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a knowledge base.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.KnowledgeBase createKnowledgeBase(
        com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateKnowledgeBaseMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes the specified knowledge base.
     * </pre>
     */
    public com.google.protobuf.Empty deleteKnowledgeBase(
        com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteKnowledgeBaseMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified knowledge base.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.KnowledgeBase updateKnowledgeBase(
        com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateKnowledgeBaseMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service KnowledgeBases.
   *
   * <pre>
   * Service for managing
   * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
   * </pre>
   */
  public static final class KnowledgeBasesBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<KnowledgeBasesBlockingStub> {
    private KnowledgeBasesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KnowledgeBasesBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KnowledgeBasesBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns the list of all knowledge bases of the specified agent.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse listKnowledgeBases(
        com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListKnowledgeBasesMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified knowledge base.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.KnowledgeBase getKnowledgeBase(
        com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetKnowledgeBaseMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a knowledge base.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.KnowledgeBase createKnowledgeBase(
        com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateKnowledgeBaseMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes the specified knowledge base.
     * </pre>
     */
    public com.google.protobuf.Empty deleteKnowledgeBase(
        com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteKnowledgeBaseMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified knowledge base.
     * </pre>
     */
    public com.google.cloud.dialogflow.v2.KnowledgeBase updateKnowledgeBase(
        com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateKnowledgeBaseMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service KnowledgeBases.
   *
   * <pre>
   * Service for managing
   * [KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
   * </pre>
   */
  public static final class KnowledgeBasesFutureStub
      extends io.grpc.stub.AbstractFutureStub<KnowledgeBasesFutureStub> {
    private KnowledgeBasesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected KnowledgeBasesFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new KnowledgeBasesFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns the list of all knowledge bases of the specified agent.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>
        listKnowledgeBases(com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListKnowledgeBasesMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified knowledge base.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.v2.KnowledgeBase>
        getKnowledgeBase(com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetKnowledgeBaseMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a knowledge base.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.v2.KnowledgeBase>
        createKnowledgeBase(com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateKnowledgeBaseMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes the specified knowledge base.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteKnowledgeBase(com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteKnowledgeBaseMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Updates the specified knowledge base.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.v2.KnowledgeBase>
        updateKnowledgeBase(com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateKnowledgeBaseMethod(), getCallOptions()), request);
    }
  }

  // Integer ids used by MethodHandlers to dispatch an incoming call to the
  // matching AsyncService method.
  private static final int METHODID_LIST_KNOWLEDGE_BASES = 0;
  private static final int METHODID_GET_KNOWLEDGE_BASE = 1;
  private static final int METHODID_CREATE_KNOWLEDGE_BASE = 2;
  private static final int METHODID_DELETE_KNOWLEDGE_BASE = 3;
  private static final int METHODID_UPDATE_KNOWLEDGE_BASE = 4;

  // One handler instance is registered per RPC method in bindService(); the
  // methodId selects which service method the unchecked casts target. All five
  // RPCs here are unary, so the streaming invoke overload always throws.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_LIST_KNOWLEDGE_BASES:
          serviceImpl.listKnowledgeBases(
              (com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>)
                  responseObserver);
          break;
        case METHODID_GET_KNOWLEDGE_BASE:
          serviceImpl.getKnowledgeBase(
              (com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>)
                  responseObserver);
          break;
        case METHODID_CREATE_KNOWLEDGE_BASE:
          serviceImpl.createKnowledgeBase(
              (com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>)
                  responseObserver);
          break;
        case METHODID_DELETE_KNOWLEDGE_BASE:
          serviceImpl.deleteKnowledgeBase(
              (com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_UPDATE_KNOWLEDGE_BASE:
          serviceImpl.updateKnowledgeBase(
              (com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.v2.KnowledgeBase>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  // Wires each method descriptor to a unary-call handler backed by the given
  // service implementation.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getListKnowledgeBasesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.v2.ListKnowledgeBasesRequest,
                    com.google.cloud.dialogflow.v2.ListKnowledgeBasesResponse>(
                    service, METHODID_LIST_KNOWLEDGE_BASES)))
        .addMethod(
            getGetKnowledgeBaseMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.v2.GetKnowledgeBaseRequest,
                    com.google.cloud.dialogflow.v2.KnowledgeBase>(
                    service, METHODID_GET_KNOWLEDGE_BASE)))
        .addMethod(
            getCreateKnowledgeBaseMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.v2.CreateKnowledgeBaseRequest,
                    com.google.cloud.dialogflow.v2.KnowledgeBase>(
                    service, METHODID_CREATE_KNOWLEDGE_BASE)))
        .addMethod(
            getDeleteKnowledgeBaseMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.v2.DeleteKnowledgeBaseRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_KNOWLEDGE_BASE)))
        .addMethod(
            getUpdateKnowledgeBaseMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.v2.UpdateKnowledgeBaseRequest,
                    com.google.cloud.dialogflow.v2.KnowledgeBase>(
                    service, METHODID_UPDATE_KNOWLEDGE_BASE)))
        .build();
  }

  // Schema-descriptor suppliers: let gRPC reflection and debug tooling map the
  // runtime service/method descriptors back to the originating proto file.
  private abstract static class KnowledgeBasesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    KnowledgeBasesBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.dialogflow.v2.KnowledgeBaseProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("KnowledgeBases");
    }
  }

  private static final class KnowledgeBasesFileDescriptorSupplier
      extends KnowledgeBasesBaseDescriptorSupplier {
    KnowledgeBasesFileDescriptorSupplier() {}
  }

  private static final class KnowledgeBasesMethodDescriptorSupplier
      extends KnowledgeBasesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    KnowledgeBasesMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily builds the aggregate service descriptor (double-checked locking on
  // the volatile field, same pattern as the per-method descriptor getters).
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (KnowledgeBasesGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new KnowledgeBasesFileDescriptorSupplier())
                      .addMethod(getListKnowledgeBasesMethod())
                      .addMethod(getGetKnowledgeBaseMethod())
                      .addMethod(getCreateKnowledgeBaseMethod())
                      .addMethod(getDeleteKnowledgeBaseMethod())
                      .addMethod(getUpdateKnowledgeBaseMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
googleapis/google-cloud-java
37,895
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/DeployApplicationRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/platform.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Message for deploying an Application. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.DeployApplicationRequest} */ public final class DeployApplicationRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.DeployApplicationRequest) DeployApplicationRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeployApplicationRequest.newBuilder() to construct. 
private DeployApplicationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeployApplicationRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeployApplicationRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.PlatformProto .internal_static_google_cloud_visionai_v1_DeployApplicationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.PlatformProto .internal_static_google_cloud_visionai_v1_DeployApplicationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.DeployApplicationRequest.class, com.google.cloud.visionai.v1.DeployApplicationRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 2; private boolean validateOnly_ = false; /** * * * <pre> * If set, validate the request and preview the application graph, but do not * actually deploy it. * </pre> * * <code>bool validate_only = 2;</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } public static final int REQUEST_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. 
*/ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENABLE_MONITORING_FIELD_NUMBER = 4; private boolean enableMonitoring_ = false; /** * * * <pre> * Optional. Whether or not to enable monitoring for the application on * deployment. * </pre> * * <code>bool enable_monitoring = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The enableMonitoring. 
*/ @java.lang.Override public boolean getEnableMonitoring() { return enableMonitoring_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (validateOnly_ != false) { output.writeBool(2, validateOnly_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_); } if (enableMonitoring_ != false) { output.writeBool(4, enableMonitoring_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, validateOnly_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_); } if (enableMonitoring_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, enableMonitoring_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.DeployApplicationRequest)) { return super.equals(obj); } com.google.cloud.visionai.v1.DeployApplicationRequest other = 
(com.google.cloud.visionai.v1.DeployApplicationRequest) obj; if (!getName().equals(other.getName())) return false; if (getValidateOnly() != other.getValidateOnly()) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getEnableMonitoring() != other.getEnableMonitoring()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + ENABLE_MONITORING_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableMonitoring()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.DeployApplicationRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.visionai.v1.DeployApplicationRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for deploying an Application. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.DeployApplicationRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.DeployApplicationRequest) com.google.cloud.visionai.v1.DeployApplicationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.PlatformProto .internal_static_google_cloud_visionai_v1_DeployApplicationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.PlatformProto .internal_static_google_cloud_visionai_v1_DeployApplicationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.DeployApplicationRequest.class, com.google.cloud.visionai.v1.DeployApplicationRequest.Builder.class); } // Construct using com.google.cloud.visionai.v1.DeployApplicationRequest.newBuilder() private Builder() {} private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; validateOnly_ = false; requestId_ = ""; enableMonitoring_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.PlatformProto .internal_static_google_cloud_visionai_v1_DeployApplicationRequest_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.DeployApplicationRequest getDefaultInstanceForType() { return com.google.cloud.visionai.v1.DeployApplicationRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.DeployApplicationRequest build() { com.google.cloud.visionai.v1.DeployApplicationRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.DeployApplicationRequest buildPartial() { com.google.cloud.visionai.v1.DeployApplicationRequest result = new com.google.cloud.visionai.v1.DeployApplicationRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.visionai.v1.DeployApplicationRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.validateOnly_ = validateOnly_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.enableMonitoring_ = enableMonitoring_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.DeployApplicationRequest) { return mergeFrom((com.google.cloud.visionai.v1.DeployApplicationRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.DeployApplicationRequest other) { if (other == com.google.cloud.visionai.v1.DeployApplicationRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000004; onChanged(); } if (other.getEnableMonitoring() != false) { setEnableMonitoring(other.getEnableMonitoring()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = 
true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { enableMonitoring_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. the name of the application to retrieve. * Format: * "projects/{project}/locations/{location}/applications/{application}" * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * If set, validate the request and preview the application graph, but do not * actually deploy it. * </pre> * * <code>bool validate_only = 2;</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * If set, validate the request and preview the application graph, but do not * actually deploy it. * </pre> * * <code>bool validate_only = 2;</code> * * @param value The validateOnly to set. * @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * If set, validate the request and preview the application graph, but do not * actually deploy it. * </pre> * * <code>bool validate_only = 2;</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000002); validateOnly_ = false; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. 
* * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. 
Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). 
* </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server will know to * ignore the request if it has already been completed. The server will * guarantee that for at least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if original operation with the same request ID * was received, and if so, will ignore the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private boolean enableMonitoring_; /** * * * <pre> * Optional. Whether or not to enable monitoring for the application on * deployment. * </pre> * * <code>bool enable_monitoring = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The enableMonitoring. */ @java.lang.Override public boolean getEnableMonitoring() { return enableMonitoring_; } /** * * * <pre> * Optional. Whether or not to enable monitoring for the application on * deployment. 
* </pre> * * <code>bool enable_monitoring = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The enableMonitoring to set. * @return This builder for chaining. */ public Builder setEnableMonitoring(boolean value) { enableMonitoring_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Whether or not to enable monitoring for the application on * deployment. * </pre> * * <code>bool enable_monitoring = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearEnableMonitoring() { bitField0_ = (bitField0_ & ~0x00000008); enableMonitoring_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.DeployApplicationRequest) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.DeployApplicationRequest) private static final com.google.cloud.visionai.v1.DeployApplicationRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.DeployApplicationRequest(); } public static com.google.cloud.visionai.v1.DeployApplicationRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeployApplicationRequest> PARSER = new com.google.protobuf.AbstractParser<DeployApplicationRequest>() { @java.lang.Override public DeployApplicationRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeployApplicationRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeployApplicationRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.DeployApplicationRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/pekko-connectors
37,827
jms/src/test/java/docs/javadsl/JmsConnectorsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * license agreements; and to You under the Apache License, version 2.0: * * https://www.apache.org/licenses/LICENSE-2.0 * * This file is part of the Apache Pekko project, which was derived from Akka. */ /* * Copyright (C) since 2016 Lightbend Inc. <https://www.lightbend.com> */ package docs.javadsl; import org.apache.pekko.Done; import org.apache.pekko.NotUsed; import org.apache.pekko.actor.ActorSystem; import org.apache.pekko.japi.Pair; import org.apache.pekko.stream.connectors.jms.Destination; import org.apache.pekko.stream.connectors.jms.*; import org.apache.pekko.stream.connectors.jms.javadsl.JmsConsumer; import org.apache.pekko.stream.connectors.jms.javadsl.JmsConsumerControl; import org.apache.pekko.stream.connectors.jms.javadsl.JmsProducer; import org.apache.pekko.stream.connectors.jms.javadsl.JmsProducerStatus; import org.apache.pekko.stream.connectors.testkit.javadsl.LogCapturingJunit4; import org.apache.pekko.stream.javadsl.Flow; import org.apache.pekko.stream.javadsl.Keep; import org.apache.pekko.stream.javadsl.Sink; import org.apache.pekko.stream.javadsl.Source; import org.apache.pekko.testkit.javadsl.TestKit; import com.typesafe.config.Config; import jmstestkit.JmsBroker; import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.activemq.ActiveMQSession; import org.apache.activemq.command.ActiveMQQueue; import org.apache.activemq.command.ActiveMQTextMessage; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import scala.util.Failure; import scala.util.Success; import scala.util.Try; import javax.jms.*; import java.nio.charset.StandardCharsets; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; 
import java.util.stream.IntStream;
import java.util.stream.Stream;

import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Minimal serializable payload used to exercise JMS ObjectMessage round-trips.
 * Value-based equality so the received object can be compared to the one sent.
 */
final class DummyJavaTests implements java.io.Serializable {

  private static final long serialVersionUID = 1234567L;

  // The single payload field; equality and hashCode are based on it alone.
  private final String value;

  DummyJavaTests(String value) {
    this.value = value;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o instanceof DummyJavaTests dummy) {
      return dummy.value.equals(this.value);
    }
    return false;
  }

  @Override
  public int hashCode() {
    return value != null ? value.hashCode() : 0;
  }
}

/**
 * Integration tests (and documentation snippet sources — the {@code // #...} marker comments
 * delimit code extracted into the Pekko Connectors docs) for the JMS javadsl connectors.
 * Each test spins up an embedded ActiveMQ broker via {@link #withServer} and runs producer
 * and consumer streams against it.
 */
public class JmsConnectorsTest {

  @Rule public final LogCapturingJunit4 logCapturing = new LogCapturingJunit4();

  // Shared actor system and settings loaded once for all tests in this class.
  private static ActorSystem system;
  private static Config producerConfig;
  // NOTE(review): browseConfig is loaded but not referenced by any test in this file —
  // the browse() test builds its settings from `system` directly; verify before removing.
  private static Config browseConfig;

  /** Creates the shared actor system and reads connector config sections. */
  @BeforeClass
  public static void setup() {
    system = ActorSystem.create();
    producerConfig = system.settings().config().getConfig(JmsProducerSettings.configPath());
    browseConfig = system.settings().config().getConfig(JmsBrowseSettings.configPath());
  }

  /** Shuts the shared actor system down after all tests have run. */
  @AfterClass
  public static void teardown() {
    TestKit.shutdownActorSystem(system);
  }

  /**
   * Builds ten text messages "1".."10", each carrying Number/IsOdd/IsEven properties.
   * Also serves as the docs snippet for creating messages with properties.
   */
  private List<JmsTextMessage> createTestMessageList() {
    List<Integer> intsIn = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    List<JmsTextMessage> msgsIn = new ArrayList<>();
    for (Integer n : intsIn) {
      // #create-messages-with-properties
      JmsTextMessage message =
          org.apache.pekko.stream.connectors.jms.JmsTextMessage.create(n.toString())
              .withProperty("Number", n)
              .withProperty("IsOdd", n % 2 == 1)
              .withProperty("IsEven", n % 2 == 0);
      // #create-messages-with-properties
      msgsIn.add(message);
    }
    return msgsIn;
  }

  /** Round-trips plain strings through a queue via textSink/textSource. */
  @Test
  public void publishAndConsumeJmsTextMessage() throws Exception {
    withServer(
        server -> {
          // #connection-factory
          // #text-sink
          // #text-source
          javax.jms.ConnectionFactory connectionFactory = server.createConnectionFactory();
          // #text-source
          // #connection-factory
          // #text-sink

          // #text-sink
          Sink<String, CompletionStage<Done>> jmsSink =
              JmsProducer.textSink(
                  JmsProducerSettings.create(system, connectionFactory).withQueue("test"));
          List<String> in = Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k");
          CompletionStage<Done> finished = Source.from(in).runWith(jmsSink, system);
          // #text-sink

          // #text-source
          Source<String, JmsConsumerControl> jmsSource =
              JmsConsumer.textSource(
                  JmsConsumerSettings.create(system, connectionFactory).withQueue("test"));
          CompletionStage<List<String>> result =
              jmsSource.take(in.size()).runWith(Sink.seq(), system);
          // #text-source

          assertEquals(Done.getInstance(), finished.toCompletableFuture().get(3, TimeUnit.SECONDS));
          assertEquals(in, result.toCompletableFuture().get(3, TimeUnit.SECONDS));
        });
  }

  /**
   * Round-trips a serializable object through a queue. The payload class must be added to
   * ActiveMQ's trusted packages or deserialization is rejected on the consumer side.
   */
  @Test
  public void publishAndConsumeJmsObjectMessage() throws Exception {
    withServer(
        server -> {
          // #connection-factory-object
          // #object-sink
          // #object-source
          ActiveMQConnectionFactory connectionFactory =
              (ActiveMQConnectionFactory) server.createConnectionFactory();
          connectionFactory.setTrustedPackages(
              Arrays.asList(DummyJavaTests.class.getPackage().getName()));
          // #object-source
          // #connection-factory-object
          // #object-sink

          // #object-sink
          Sink<java.io.Serializable, CompletionStage<Done>> jmsSink =
              JmsProducer.objectSink(
                  JmsProducerSettings.create(system, connectionFactory).withQueue("test"));
          java.io.Serializable in = new DummyJavaTests("javaTest");
          CompletionStage<Done> finished = Source.single(in).runWith(jmsSink, system);
          // #object-sink

          // #object-source
          Source<java.io.Serializable, JmsConsumerControl> jmsSource =
              JmsConsumer.objectSource(
                  JmsConsumerSettings.create(system, connectionFactory).withQueue("test"));
          CompletionStage<java.io.Serializable> result =
              jmsSource.take(1).runWith(Sink.head(), system);
          // #object-source

          assertEquals(Done.getInstance(), finished.toCompletableFuture().get(3, TimeUnit.SECONDS));
          Object resultObject = result.toCompletableFuture().get(3, TimeUnit.SECONDS);
          assertEquals(resultObject, in);
        });
  }

  /** Round-trips a byte array through a queue via bytesSink/bytesSource. */
  @Test
  public void publishAndConsumeJmsByteMessage() throws Exception {
    withServer(
        server -> {
          // #bytearray-sink
          // #bytearray-source
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          // #bytearray-sink
          // #bytearray-source

          // #bytearray-sink
          Sink<byte[], CompletionStage<Done>> jmsSink =
              JmsProducer.bytesSink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));
          byte[] in = "ThisIsATest".getBytes(StandardCharsets.UTF_8);
          CompletionStage<Done> finished = Source.single(in).runWith(jmsSink, system);
          // #bytearray-sink

          // #bytearray-source
          Source<byte[], JmsConsumerControl> jmsSource =
              JmsConsumer.bytesSource(
                  JmsConsumerSettings.create(system, connectionFactory).withQueue("test"));
          CompletionStage<byte[]> result = jmsSource.take(1).runWith(Sink.head(), system);
          // #bytearray-source

          assertEquals(Done.getInstance(), finished.toCompletableFuture().get(3, TimeUnit.SECONDS));
          byte[] resultArray = result.toCompletableFuture().get(3, TimeUnit.SECONDS);
          assertEquals("ThisIsATest", new String(resultArray, StandardCharsets.UTF_8));
        });
  }

  /**
   * Round-trips a MapMessage covering every supported value type (string, numeric primitives,
   * boolean, byte array) and checks each entry survives the trip.
   */
  @Test
  public void publishAndConsumeJmsMapMessage() throws Exception {
    withServer(
        server -> {
          // #map-sink
          // #map-source
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          // #map-sink
          // #map-source

          // #map-sink
          Sink<Map<String, Object>, CompletionStage<Done>> jmsSink =
              JmsProducer.mapSink(
                  JmsProducerSettings.create(system, connectionFactory).withQueue("test"));

          Map<String, Object> in = new HashMap<>();
          in.put("string value", "value");
          in.put("int value", 42);
          in.put("double value", 43.0);
          in.put("short value", (short) 7);
          in.put("boolean value", true);
          in.put("long value", 7L);
          in.put("bytearray", "AStringAsByteArray".getBytes(StandardCharsets.UTF_8));
          in.put("byte", (byte) 1);

          CompletionStage<Done> finished = Source.single(in).runWith(jmsSink, system);
          // #map-sink

          // #map-source
          Source<Map<String, Object>, JmsConsumerControl> jmsSource =
              JmsConsumer.mapSource(
                  JmsConsumerSettings.create(system, connectionFactory).withQueue("test"));
          CompletionStage<Map<String, Object>> resultStage =
              jmsSource.take(1).runWith(Sink.head(), system);
          // #map-source

          Map<String, Object> resultMap =
              resultStage.toCompletableFuture().get(3, TimeUnit.SECONDS);
          assertEquals(resultMap.get("string value"), in.get("string value"));
          assertEquals(resultMap.get("int value"), in.get("int value"));
          assertEquals(resultMap.get("double value"), in.get("double value"));
          assertEquals(resultMap.get("short value"), in.get("short value"));
          assertEquals(resultMap.get("boolean value"), in.get("boolean value"));
          assertEquals(resultMap.get("long value"), in.get("long value"));
          assertEquals(resultMap.get("byte"), in.get("byte"));
          assertEquals(Done.getInstance(), finished.toCompletableFuture().get(3, TimeUnit.SECONDS));
          // Byte arrays need content comparison, not reference equality.
          byte[] resultByteArray = (byte[]) resultMap.get("bytearray");
          assertEquals(new String(resultByteArray, StandardCharsets.UTF_8), "AStringAsByteArray");
        });
  }

  /**
   * Consumes raw javax.jms.Message values, narrowing to TextMessage, and demonstrates the
   * consumer control materialized value used to shut the stream down.
   */
  @Test
  public void publishAndConsumeJmsTextMessagesWithProperties() throws Exception {
    withServer(
        server -> {
          // #jms-source
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          // #jms-source
          int expectedMessages = 2;

          // #create-jms-sink
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          CompletionStage<Done> finished =
              Source.from(Arrays.asList("Message A", "Message B"))
                  .map(JmsTextMessage::create)
                  .runWith(jmsSink, system);
          // #create-jms-sink

          // #jms-source
          Source<javax.jms.Message, JmsConsumerControl> jmsSource =
              JmsConsumer.create(
                  JmsConsumerSettings.create(system, connectionFactory).withQueue("test"));

          Pair<JmsConsumerControl, CompletionStage<List<String>>> controlAndResult =
              jmsSource
                  .take(expectedMessages)
                  .map(
                      msg -> {
                        if (msg instanceof TextMessage t) {
                          return t.getText();
                        } else
                          throw new RuntimeException("unexpected message type " + msg.getClass());
                      })
                  .toMat(Sink.seq(), Keep.both())
                  .run(system);
          // #jms-source

          CompletionStage<List<String>> result = controlAndResult.second();
          List<String> outMessages = result.toCompletableFuture().get(3, TimeUnit.SECONDS);
          assertEquals("unexpected number of elements", expectedMessages, outMessages.size());
          // #jms-source
          JmsConsumerControl control = controlAndResult.first();
          control.shutdown();
          // #jms-source
        });
  }

  /**
   * Sends messages with the full set of JMS headers (type, correlation id, reply-to,
   * time-to-live, priority, delivery mode) and verifies each header on the received side.
   */
  @Test
  public void publishAndConsumeJmsTextMessagesWithHeaders() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          // #create-messages-with-headers
          List<JmsTextMessage> msgsIn =
              createTestMessageList().stream()
                  .map(
                      jmsTextMessage ->
                          jmsTextMessage
                              .withHeader(JmsType.create("type"))
                              .withHeader(JmsCorrelationId.create("correlationId"))
                              .withHeader(JmsReplyTo.queue("test-reply"))
                              .withHeader(JmsTimeToLive.create(999, TimeUnit.SECONDS))
                              .withHeader(JmsPriority.create(2))
                              .withHeader(JmsDeliveryMode.create(DeliveryMode.NON_PERSISTENT)))
                  .collect(Collectors.toList());
          // #create-messages-with-headers

          Source.from(msgsIn).runWith(jmsSink, system);

          Source<Message, JmsConsumerControl> jmsSource =
              JmsConsumer.create(
                  JmsConsumerSettings.create(system, connectionFactory).withQueue("test"));
          CompletionStage<List<Message>> result =
              jmsSource.take(msgsIn.size()).runWith(Sink.seq(), system);

          List<Message> outMessages = result.toCompletableFuture().get(3, TimeUnit.SECONDS);
          int msgIdx = 0;
          for (Message outMsg : outMessages) {
            assertEquals(
                outMsg.getIntProperty("Number"),
                msgsIn.get(msgIdx).properties().get("Number").get());
            assertEquals(
                outMsg.getBooleanProperty("IsOdd"),
                msgsIn.get(msgIdx).properties().get("IsOdd").get());
            assertEquals(
                outMsg.getBooleanProperty("IsEven"),
                (msgsIn.get(msgIdx).properties().get("IsEven").get()));
            assertEquals(outMsg.getJMSType(), "type");
            assertEquals(outMsg.getJMSCorrelationID(), "correlationId");
            assertEquals(((ActiveMQQueue) outMsg.getJMSReplyTo()).getQueueName(), "test-reply");
            // A non-zero expiration shows the time-to-live header was applied.
            assertTrue(outMsg.getJMSExpiration() != 0);
            assertEquals(2, outMsg.getJMSPriority());
            assertEquals(DeliveryMode.NON_PERSISTENT, outMsg.getJMSDeliveryMode());
            msgIdx++;
          }
        });
  }

  /**
   * Destination factory used by the custom-destination docs snippet: resolves a queue named
   * "my-" + destinationName from the live session. JMSException is wrapped unchecked because
   * the functional interface cannot throw.
   */
  // #custom-destination
  Function<javax.jms.Session, javax.jms.Destination> createQueue(String destinationName) {
    return (session) -> {
      ActiveMQSession amqSession = (ActiveMQSession) session;
      try {
        return amqSession.createQueue("my-" + destinationName);
      } catch (JMSException e) {
        throw new RuntimeException(e);
      }
    };
  }
  // #custom-destination

  /** Produces and consumes through a CustomDestination resolved at connection time. */
  @Test
  public void useCustomDesination() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory)
                      .withDestination(new CustomDestination("custom", createQueue("custom"))));

          List<JmsTextMessage> msgsIn = createTestMessageList();
          Source.from(msgsIn).runWith(jmsSink, system);

          // #custom-destination
          Source<Message, JmsConsumerControl> jmsSource =
              JmsConsumer.create(
                  JmsConsumerSettings.create(system, connectionFactory)
                      .withDestination(new CustomDestination("custom", createQueue("custom"))));
          // #custom-destination

          CompletionStage<List<Message>> result =
              jmsSource.take(msgsIn.size()).runWith(Sink.seq(), system);

          List<Message> outMessages = result.toCompletableFuture().get(3, TimeUnit.SECONDS);
          assertEquals(10, outMessages.size());
        });
  }

  /** Verifies a JMS message selector ("IsOdd = TRUE") filters to the five odd messages. */
  @Test
  public void publishJmsTextMessagesWithPropertiesAndConsumeThemWithASelector() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          List<JmsTextMessage> msgsIn = createTestMessageList();
          Source.from(msgsIn).runWith(jmsSink, system);

          // #source-with-selector
          Source<Message, JmsConsumerControl> jmsSource =
              JmsConsumer.create(
                  JmsConsumerSettings.create(system, connectionFactory)
                      .withQueue("test")
                      .withSelector("IsOdd = TRUE"));
          // #source-with-selector

          // Expected subset: the messages whose body parses to an odd number.
          List<JmsTextMessage> oddMsgsIn =
              msgsIn.stream()
                  .filter(msg -> Integer.valueOf(msg.body()) % 2 == 1)
                  .collect(Collectors.toList());
          assertEquals(5, oddMsgsIn.size());

          CompletionStage<List<Message>> result =
              jmsSource.take(oddMsgsIn.size()).runWith(Sink.seq(), system);

          List<Message> outMessages = result.toCompletableFuture().get(4, TimeUnit.SECONDS);
          int msgIdx = 0;
          for (Message outMsg : outMessages) {
            assertEquals(
                outMsg.getIntProperty("Number"),
                oddMsgsIn.get(msgIdx).properties().get("Number").get());
            assertEquals(
                outMsg.getBooleanProperty("IsOdd"),
                oddMsgsIn.get(msgIdx).properties().get("IsOdd").get());
            assertEquals(
                outMsg.getBooleanProperty("IsEven"),
                (oddMsgsIn.get(msgIdx).properties().get("IsEven").get()));
            assertEquals(1, outMsg.getIntProperty("Number") % 2);
            msgIdx++;
          }
        });
  }

  /**
   * Two producers publish to the same topic; two independent subscribers each receive the
   * union of both publishers' messages. Subscribers start before publishing (non-durable
   * topic subscribers only see messages sent after subscription).
   */
  @Test
  public void publishAndConsumeTopic() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();

          List<String> in = Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k");
          List<String> inNumbers =
              IntStream.range(0, 10).boxed().map(String::valueOf).collect(Collectors.toList());

          Sink<String, CompletionStage<Done>> jmsTopicSink =
              JmsProducer.textSink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withTopic("topic"));
          Sink<String, CompletionStage<Done>> jmsTopicSink2 =
              JmsProducer.textSink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withTopic("topic"));

          Source<String, JmsConsumerControl> jmsTopicSource =
              JmsConsumer.textSource(
                  JmsConsumerSettings.create(system, connectionFactory).withTopic("topic"));
          Source<String, JmsConsumerControl> jmsTopicSource2 =
              JmsConsumer.textSource(
                  JmsConsumerSettings.create(system, connectionFactory).withTopic("topic"));

          // Sorted because interleaving across the two publishers is nondeterministic.
          CompletionStage<List<String>> result =
              jmsTopicSource
                  .take(in.size() + inNumbers.size())
                  .runWith(Sink.seq(), system)
                  .thenApply(l -> l.stream().sorted().collect(Collectors.toList()));
          CompletionStage<List<String>> result2 =
              jmsTopicSource2
                  .take(in.size() + inNumbers.size())
                  .runWith(Sink.seq(), system)
                  .thenApply(l -> l.stream().sorted().collect(Collectors.toList()));

          // Give the subscribers time to attach before publishing begins.
          Thread.sleep(500);

          CompletionStage<Done> finished = Source.from(in).runWith(jmsTopicSink, system);
          Source.from(inNumbers).runWith(jmsTopicSink2, system);

          assertEquals(
              Stream.concat(in.stream(), inNumbers.stream()).sorted().collect(Collectors.toList()),
              result.toCompletableFuture().get(5, TimeUnit.SECONDS));
          assertEquals(
              Stream.concat(in.stream(), inNumbers.stream()).sorted().collect(Collectors.toList()),
              result2.toCompletableFuture().get(5, TimeUnit.SECONDS));
        });
  }

  /** The sink's materialized completion stage completes with Done on normal stream end. */
  @Test
  public void sinkNormalCompletion() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory)
                      .withQueue("test")
                      .withConnectionRetrySettings(
                          ConnectionRetrySettings.create(system).withMaxRetries(0)));

          List<JmsTextMessage> msgsIn = createTestMessageList();

          CompletionStage<Done> completionFuture = Source.from(msgsIn).runWith(jmsSink, system);
          Done completed = completionFuture.toCompletableFuture().get(3, TimeUnit.SECONDS);
          assertEquals(completed, Done.getInstance());
        });
  }

  /** An upstream failure propagates to the sink's completion stage as the original cause. */
  @Test
  public void sinkExceptionalCompletion() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          CompletionStage<Done> completionFuture =
              Source.<JmsTextMessage>failed(new RuntimeException("Simulated error"))
                  .runWith(jmsSink, system);

          try {
            completionFuture.toCompletableFuture().get(3, TimeUnit.SECONDS);
            fail("Not completed exceptionally");
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            assertTrue(cause instanceof RuntimeException);
          }
        });
  }

  /**
   * Stopping the broker mid-stream (with retries disabled) fails the sink's completion stage
   * with a JMSException. Elements are delayed via mapAsync so the stream is still running
   * when the broker goes down.
   */
  @Test
  public void sinkExceptionalCompletionOnDisconnect() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Sink<JmsTextMessage, CompletionStage<Done>> jmsSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(producerConfig, connectionFactory)
                      .withQueue("test")
                      .withConnectionRetrySettings(
                          ConnectionRetrySettings.create(system).withMaxRetries(0)));

          List<JmsTextMessage> msgsIn = createTestMessageList();

          CompletionStage<Done> completionFuture =
              Source.from(msgsIn)
                  .mapAsync(
                      1,
                      m ->
                          CompletableFuture.supplyAsync(
                              () -> {
                                try {
                                  Thread.sleep(500);
                                } catch (InterruptedException e) {
                                  throw new RuntimeException(e);
                                }
                                return m;
                              }))
                  .runWith(jmsSink, system);

          try {
            // Make sure connection got started before stopping.
            Thread.sleep(500);
          } catch (InterruptedException e) {
            fail("Sleep interrupted.");
          }
          server.stop();

          try {
            completionFuture.toCompletableFuture().get(3, TimeUnit.SECONDS);
            fail("Not completed exceptionally");
          } catch (ExecutionException e) {
            Throwable cause = e.getCause();
            assertTrue(cause instanceof JMSException);
          }
        });
  }

  /** Browsing reads queued messages non-destructively and in order. */
  @Test
  public void browse() throws Exception {
    withServer(
        server -> {
          // #browse-source
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          // #browse-source

          Sink<String, CompletionStage<Done>> jmsSink =
              JmsProducer.textSink(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          List<String> in = Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k");

          // Block until all messages are enqueued before browsing.
          Source.from(in).runWith(jmsSink, system).toCompletableFuture().get();

          // #browse-source
          Source<javax.jms.Message, NotUsed> browseSource =
              JmsConsumer.browse(
                  JmsBrowseSettings.create(system, connectionFactory).withQueue("test"));

          CompletionStage<List<javax.jms.Message>> result =
              browseSource.runWith(Sink.seq(), system);
          // #browse-source

          List<String> resultText =
              result.toCompletableFuture().get().stream()
                  .map(
                      message -> {
                        try {
                          return ((ActiveMQTextMessage) message).getText();
                        } catch (JMSException e) {
                          throw new RuntimeException(e);
                        }
                      })
                  .collect(Collectors.toList());

          assertEquals(in, resultText);
        });
  }

  /**
   * Durable topic subscription: the consumer connection factory must carry a client id,
   * and the subscriber is created with a durable subscription name.
   */
  @Test
  public void publishAndConsumeDurableTopic() throws Exception {
    withServer(
        server -> {
          ConnectionFactory producerConnectionFactory = server.createConnectionFactory();
          // #create-connection-factory-with-client-id
          ConnectionFactory consumerConnectionFactory = server.createConnectionFactory();
          ((ActiveMQConnectionFactory) consumerConnectionFactory)
              .setClientID(getClass().getSimpleName());
          // #create-connection-factory-with-client-id

          List<String> in = Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k");

          Sink<String, CompletionStage<Done>> jmsTopicSink =
              JmsProducer.textSink(
                  JmsProducerSettings.create(producerConfig, producerConnectionFactory)
                      .withTopic("topic"));

          // #create-durable-topic-source
          Source<String, JmsConsumerControl> jmsTopicSource =
              JmsConsumer.textSource(
                  JmsConsumerSettings.create(system, consumerConnectionFactory)
                      .withDurableTopic("topic", "durable-test"));
          // #create-durable-topic-source

          // #run-durable-topic-source
          CompletionStage<List<String>> result =
              jmsTopicSource.take(in.size()).runWith(Sink.seq(), system);
          // #run-durable-topic-source

          // Let the durable subscriber register before publishing.
          Thread.sleep(500);
          Source.from(in).runWith(jmsTopicSink, system);
          assertEquals(in, result.toCompletableFuture().get(5, TimeUnit.SECONDS));
        });
  }

  /** The producer flow emits each message downstream after sending it, preserving order. */
  @Test
  public void producerFlow() throws Exception {
    withServer(
        server -> {
          // #flow-producer
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Flow<JmsTextMessage, JmsTextMessage, JmsProducerStatus> flow =
              JmsProducer.flow(
                  JmsProducerSettings.create(system, connectionFactory).withQueue("test"));

          List<JmsTextMessage> input = createTestMessageList();

          CompletionStage<List<JmsTextMessage>> result =
              Source.from(input).via(flow).runWith(Sink.seq(), system);
          // #flow-producer

          assertEquals(input, result.toCompletableFuture().get());
        });
  }

  /**
   * Per-message destination routing: each message's toQueue(...) overrides the flow's
   * configured queue, splitting even/odd numbers into separate queues.
   */
  @Test
  public void directedProducerFlow() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();

          // #run-directed-flow-producer
          Flow<JmsTextMessage, JmsTextMessage, JmsProducerStatus> flowSink =
              JmsProducer.flow(
                  JmsProducerSettings.create(system, connectionFactory).withQueue("test"));
          List<JmsTextMessage> input = new ArrayList<>();
          for (Integer n : Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) {
            String queueName = (n % 2 == 0) ? "even" : "odd";
            input.add(JmsTextMessage.create(n.toString()).toQueue(queueName));
          }
          Source.from(input).via(flowSink).runWith(Sink.seq(), system);
          // #run-directed-flow-producer

          CompletionStage<List<Integer>> even =
              JmsConsumer.textSource(
                      JmsConsumerSettings.create(system, connectionFactory).withQueue("even"))
                  .take(5)
                  .map(Integer::parseInt)
                  .runWith(Sink.seq(), system);
          CompletionStage<List<Integer>> odd =
              JmsConsumer.textSource(
                      JmsConsumerSettings.create(system, connectionFactory).withQueue("odd"))
                  .take(5)
                  .map(Integer::parseInt)
                  .runWith(Sink.seq(), system);
          assertEquals(Arrays.asList(1, 3, 5, 7, 9), odd.toCompletableFuture().get());
          assertEquals(Arrays.asList(2, 4, 6, 8, 10), even.toCompletableFuture().get());
        });
  }

  /**
   * With the broker stopped, the consumer retries 4 times (backoff at least
   * 100+400+900+1600 ms) then fails with ConnectionRetryException caused by a JMSException.
   */
  @Test
  public void failAfterRetry() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          // Broker is stopped up front so every connection attempt fails.
          server.stop();

          long startTime = System.currentTimeMillis();
          CompletionStage<List<Message>> result =
              JmsConsumer.create(
                      JmsConsumerSettings.create(system, connectionFactory)
                          .withConnectionRetrySettings(
                              ConnectionRetrySettings.create(system).withMaxRetries(4))
                          .withQueue("test"))
                  .runWith(Sink.seq(), system);

          // Convert success/failure into a Try so both paths can be inspected.
          CompletionStage<Try<List<Message>>> tryFuture =
              result.handle(
                  (l, e) -> {
                    if (l != null) return Success.apply(l);
                    else return Failure.apply(e);
                  });

          Try<List<Message>> tryResult = tryFuture.toCompletableFuture().get();
          long endTime = System.currentTimeMillis();

          assertTrue("Total retry is too short", endTime - startTime > 100L + 400L + 900L + 1600L);
          assertTrue("Result must be a failure", tryResult.isFailure());
          Throwable exception = tryResult.failed().get();
          assertTrue(
              "Did not fail with a ConnectionRetryException",
              exception instanceof ConnectionRetryException);
          assertTrue(
              "Cause of failure is not a JMSException",
              exception.getCause() instanceof JMSException);
        });
  }

  /** flexiFlow carries a per-message pass-through value that re-emerges after the send. */
  @Test
  public void passThroughMessageEnvelopes() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          // #run-flexi-flow-producer
          Flow<JmsEnvelope<String>, JmsEnvelope<String>, JmsProducerStatus> jmsProducer =
              JmsProducer.flexiFlow(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          List<String> data = Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k");
          List<JmsEnvelope<String>> input = new ArrayList<>();
          for (String s : data) {
            String passThrough = s;
            input.add(JmsTextMessage.create(s, passThrough));
          }

          CompletionStage<List<String>> result =
              Source.from(input)
                  .via(jmsProducer)
                  .map(JmsEnvelope::passThrough)
                  .runWith(Sink.seq(), system);
          // #run-flexi-flow-producer
          assertEquals(data, result.toCompletableFuture().get());
        });
  }

  /**
   * JmsPassThrough envelopes traverse flexiFlow without producing any JMS message:
   * the pass-through values come out, and a consumer on the queue sees nothing.
   */
  @Test
  public void passThroughEmptyMessageEnvelopes() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();

          // Watch the queue so we can assert no message was actually published.
          Pair<JmsConsumerControl, CompletionStage<List<String>>> switchAndItems =
              JmsConsumer.textSource(
                      JmsConsumerSettings.create(system, connectionFactory).withQueue("test"))
                  .toMat(Sink.seq(), Keep.both())
                  .run(system);

          // #run-flexi-flow-pass-through-producer
          Flow<JmsEnvelope<String>, JmsEnvelope<String>, JmsProducerStatus> jmsProducer =
              JmsProducer.flexiFlow(
                  JmsProducerSettings.create(producerConfig, connectionFactory).withQueue("test"));

          List<String> data = Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k");
          List<JmsEnvelope<String>> input = new ArrayList<>();
          for (String s : data) {
            String passThrough = s;
            input.add(JmsPassThrough.create(passThrough));
          }

          CompletionStage<List<String>> result =
              Source.from(input)
                  .via(jmsProducer)
                  .map(JmsEnvelope::passThrough)
                  .runWith(Sink.seq(), system);
          // #run-flexi-flow-pass-through-producer
          assertEquals(data, result.toCompletableFuture().get());

          Thread.sleep(500);
          switchAndItems.first().shutdown();
          assertTrue(switchAndItems.second().toCompletableFuture().get().isEmpty());
        });
  }

  /**
   * Request/reply pattern: a responder stream reverses each request body and routes the
   * reply to the request's JMSReplyTo (a temporary queue), preserving the correlation id.
   * The reply is read with a plain JMS consumer on the original session.
   */
  @Test
  public void requestReplyWithTempQueues() throws Exception {
    withServer(
        server -> {
          ConnectionFactory connectionFactory = server.createConnectionFactory();
          Connection connection = connectionFactory.createConnection();
          connection.start();
          Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
          TemporaryQueue tempQueue = session.createTemporaryQueue();
          Destination tempQueueDest =
              org.apache.pekko.stream.connectors.jms.Destination.createDestination(tempQueue);

          String message = "ThisIsATest";
          Function<String, String> reverse = (in) -> new StringBuilder(in).reverse().toString();
          String correlationId = UUID.randomUUID().toString();

          Sink<JmsTextMessage, CompletionStage<Done>> toRespondSink =
              JmsProducer.sink(
                  JmsProducerSettings.create(system, connectionFactory).withQueue("test"));
          CompletionStage<Done> toRespondStreamCompletion =
              Source.single(
                      JmsTextMessage.create(message)
                          .withHeader(JmsCorrelationId.create(correlationId))
                          .withHeader(new JmsReplyTo(tempQueueDest)))
                  .runWith(toRespondSink, system);

          // #request-reply
          JmsConsumerControl respondStreamControl =
              JmsConsumer.create(
                      JmsConsumerSettings.create(system, connectionFactory).withQueue("test"))
                  .map(JmsMessageFactory::create)
                  .collectType(JmsTextMessage.class)
                  .map(
                      textMessage -> {
                        JmsTextMessage m =
                            JmsTextMessage.create(reverse.apply(textMessage.body()));
                        // Redirect the reply to the request's reply-to destination and
                        // propagate the correlation id; other headers are dropped.
                        for (JmsHeader h : textMessage.getHeaders())
                          if (h.getClass().equals(JmsReplyTo.class))
                            m = m.to(((JmsReplyTo) h).jmsDestination());
                          else if (h.getClass().equals(JmsCorrelationId.class))
                            m = m.withHeader(h);
                        return m;
                      })
                  .via(
                      JmsProducer.flow(
                          JmsProducerSettings.create(system, connectionFactory)
                              .withQueue("ignored")))
                  .to(Sink.ignore())
                  .run(system);
          // #request-reply
          // getting ConnectionRetryException when trying to listen using streams, assuming it's
          // because a different session can't listen on the original session's TemporaryQueue
          MessageConsumer consumer = session.createConsumer(tempQueue);
          Message msg = consumer.receive(5000);

          assertEquals(Done.done(), toRespondStreamCompletion.toCompletableFuture().get());
          assertTrue(msg instanceof TextMessage);
          assertEquals(correlationId, msg.getJMSCorrelationID());
          assertEquals(reverse.apply(message), ((TextMessage) msg).getText());
          respondStreamControl.shutdown();
          connection.close();
        });
  }

  /**
   * Runs the given test body against a fresh embedded broker, then stops the broker
   * (if the test has not already stopped it). The short sleep lets in-flight stream
   * shutdown settle before teardown.
   */
  private void withServer(ConsumerChecked<JmsBroker> test) throws Exception {
    JmsBroker broker = JmsBroker.apply();
    try {
      test.accept(broker);
      Thread.sleep(100);
    } finally {
      if (broker.isStarted()) {
        broker.stop();
      }
    }
  }

  /** Like java.util.function.Consumer, but permits the test body to throw checked exceptions. */
  @FunctionalInterface
  private interface ConsumerChecked<T> {
    void accept(T elt) throws Exception;
  }
}
apache/sis
38,264
endorsed/src/org.apache.sis.feature/main/org/apache/sis/geometry/wrapper/jts/Wrapper.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.geometry.wrapper.jts; import java.awt.Shape; import java.util.Arrays; import java.util.ArrayList; import java.util.EnumMap; import java.util.Iterator; import java.util.OptionalInt; import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.IntFunction; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.CoordinateSequence; import org.locationtech.jts.geom.Envelope; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryCollection; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LineString; import org.locationtech.jts.geom.LinearRing; import org.locationtech.jts.geom.MultiLineString; import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.Polygon; import org.locationtech.jts.io.WKTWriter; import org.locationtech.jts.simplify.DouglasPeuckerSimplifier; import org.locationtech.jts.simplify.TopologyPreservingSimplifier; import org.opengis.util.FactoryException; import org.opengis.geometry.DirectPosition; import 
org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.CoordinateOperation;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.TransformException;
import org.apache.sis.referencing.internal.shared.ReferencingUtilities;
import org.apache.sis.geometry.DirectPosition2D;
import org.apache.sis.geometry.GeneralDirectPosition;
import org.apache.sis.geometry.GeneralEnvelope;
import org.apache.sis.geometry.wrapper.Geometries;
import org.apache.sis.geometry.wrapper.GeometryType;
import org.apache.sis.geometry.wrapper.GeometryWrapper;
import org.apache.sis.util.UnconvertibleObjectException;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.ArraysExt;
import org.apache.sis.util.Debug;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.filter.sqlmm.SQLMM;
import static org.apache.sis.geometry.wrapper.GeometryType.POINT;

// Specific to the main branch:
import org.apache.sis.pending.geoapi.filter.SpatialOperatorName;
import org.apache.sis.pending.geoapi.filter.DistanceOperatorName;


/**
 * The wrapper of Java Topology Suite (JTS) geometries.
 *
 * @author  Johann Sorel (Geomatys)
 * @author  Martin Desruisseaux (Geomatys)
 * @author  Alexis Manin (Geomatys)
 */
final class Wrapper extends GeometryWrapper {
    /**
     * The wrapped implementation.
     */
    private final Geometry geometry;

    /**
     * Creates a new wrapper around the given geometry.
     * Note: {@code crs} is a field inherited from {@link GeometryWrapper}
     * (it is assigned here without a local declaration).
     *
     * @param  geometry  the geometry to wrap.
     * @throws FactoryException if the <abbr>CRS</abbr> cannot be created from the <abbr>SRID</abbr> code.
     */
    Wrapper(final Geometry geometry) throws FactoryException {
        this.geometry = geometry;
        crs = JTS.getCoordinateReferenceSystem(geometry);
    }

    /**
     * Creates a new wrapper with the same <abbr>CRS</abbr> than the given wrapper.
     * Metadata (user data) of the source geometry is copied to the new geometry.
     *
     * @param  source    the source wrapper from which is derived the geometry.
     * @param  geometry  the geometry to wrap.
     */
    private Wrapper(final Wrapper source, final Geometry geometry) {
        this.geometry = geometry;
        this.crs = source.crs;
        JTS.copyMetadata(source.geometry, geometry);
    }

    /**
     * Returns the implementation-dependent factory of geometric object.
     */
    @Override
    protected Geometries<Geometry> factory() {
        return Factory.INSTANCE;
    }

    /**
     * Returns the geometry specified at construction time.
     */
    @Override
    protected Object implementation() {
        return geometry;
    }

    /**
     * Returns the Spatial Reference System Identifier (SRID) if available.
     * This is <em>not</em> necessarily an EPSG code, even it is common practice to use
     * the same numerical values as EPSG. Note that the absence of SRID does not mean
     * that {@link #getCoordinateReferenceSystem()} would return no CRS.
     */
    @Override
    public OptionalInt getSRID() {
        // JTS uses 0 as the "no SRID" sentinel value.
        final int srid = geometry.getSRID();
        return (srid != 0) ? OptionalInt.of(srid) : OptionalInt.empty();
    }

    /**
     * Sets the coordinate reference system. This method overwrites any previous user object.
     * This is okay for the context in which Apache SIS uses this method, which is only for
     * newly created geometries.
     */
    @Override
    public void setCoordinateReferenceSystem(final CoordinateReferenceSystem crs) {
        super.setCoordinateReferenceSystem(crs);
        JTS.setCoordinateReferenceSystem(geometry, crs);
    }

    /**
     * Returns the dimension of the coordinates that define this geometry.
     */
    @Override
    public int getCoordinateDimension() {
        return getCoordinatesDimension(geometry);
    }

    /**
     * Gets the number of dimensions of geometry vertex (sequence of coordinate tuples), which can be 2 or 3.
     * Note that this is different than the {@linkplain Geometry#getDimension() geometry topological dimension},
     * which can be 0, 1 or 2.
     *
     * @param  geometry  the geometry for which to get <em>vertex</em> (not topological) dimension.
     * @return vertex dimension of the given geometry.
     * @throws IllegalArgumentException if the type of the given geometry is not recognized.
     */
    private static int getCoordinatesDimension(final Geometry geometry) {
        final CoordinateSequence cs;
        if (geometry instanceof Point) {
            // Most efficient method (no allocation) in JTS 1.18.
            cs = ((Point) geometry).getCoordinateSequence();
        } else if (geometry instanceof LineString) {
            // Most efficient method (no allocation) in JTS 1.18.
            cs = ((LineString) geometry).getCoordinateSequence();
        } else if (geometry instanceof Polygon) {
            // The exterior ring is representative of the whole polygon's vertex dimension.
            return getCoordinatesDimension(((Polygon) geometry).getExteriorRing());
        } else if (geometry instanceof GeometryCollection) {
            final GeometryCollection gc = (GeometryCollection) geometry;
            final int n = gc.getNumGeometries();
            if (n == 0) {
                return Factory.TRIDIMENSIONAL;      // Undefined coordinates, JTS assumes 3 for empty geometries.
            }
            for (int i=0; i<n; i++) {
                // If at least one geometry is 3D, consider the whole geometry as 3D.
                final int d = getCoordinatesDimension(gc.getGeometryN(i));
                if (d > Factory.BIDIMENSIONAL) return d;
            }
            return Factory.BIDIMENSIONAL;
        } else {
            throw new IllegalArgumentException(Errors.format(Errors.Keys.UnknownType_1, geometry.getGeometryType()));
        }
        return cs.getDimension();
    }

    /**
     * Returns the envelope of the wrapped JTS geometry. Never null, but may be empty.
     * In current implementation, <var>z</var> values of three-dimensional envelopes
     * are {@link Double#NaN}. It may change in a future version if we have a way to
     * get those <var>z</var> values from a JTS object.
     */
    @Override
    public GeneralEnvelope getEnvelope() {
        final Envelope bounds = geometry.getEnvelopeInternal();
        final CoordinateReferenceSystem crs = getCoordinateReferenceSystem();
        final var env = (crs != null) ? new GeneralEnvelope(crs) : new GeneralEnvelope(Factory.BIDIMENSIONAL);
        // Start from all-NaN so that any dimension beyond (x,y) stays NaN.
        env.setToNaN();
        if (!bounds.isNull()) {
            env.setRange(0, bounds.getMinX(), bounds.getMaxX());
            env.setRange(1, bounds.getMinY(), bounds.getMaxY());
        }
        return env;
    }

    /**
     * Returns the centroid of the wrapped geometry as a direct position.
     * The returned position is three-dimensional when a <var>z</var> value is
     * available (either from the coordinate itself or implied by a 3D CRS),
     * and two-dimensional otherwise.
     */
    @Override
    public DirectPosition getCentroid() {
        final Coordinate c = geometry.getCentroid().getCoordinate();
        @SuppressWarnings("LocalVariableHidesMemberVariable")
        final CoordinateReferenceSystem crs = getCoordinateReferenceSystem();
        if (crs == null) {
            final double z = c.getZ();
            if (!Double.isNaN(z)) {
                return new GeneralDirectPosition(c.x, c.y, z);
            }
        } else if (ReferencingUtilities.getDimension(crs) != Factory.BIDIMENSIONAL) {
            // CRS has more than two dimensions: store z even if it is NaN.
            final var point = new GeneralDirectPosition(crs);
            point.setCoordinate(0, c.x);
            point.setCoordinate(1, c.y);
            point.setCoordinate(2, c.getZ());
            return point;
        }
        return new DirectPosition2D(crs, c.x, c.y);
    }

    /**
     * If the wrapped geometry is a point, returns its coordinates. Otherwise returns {@code null}.
     * If non-null, the returned array may have a length of 2 or 3.
     */
    @Override
    public double[] getPointCoordinates() {
        if (!(geometry instanceof Point)) {
            return null;
        }
        final Coordinate pt = ((Point) geometry).getCoordinate();
        final double z = pt.getZ();
        final double[] coord;
        if (Double.isNaN(z)) {
            coord = new double[Factory.BIDIMENSIONAL];
        } else {
            coord = new double[Factory.TRIDIMENSIONAL];
            coord[2] = z;
        }
        coord[1] = pt.y;
        coord[0] = pt.x;
        return coord;
    }

    /**
     * Returns all coordinate tuples in the wrapped geometry.
     * This method is currently used for testing purpose only.
     * Note that only (x,y) values are returned, even for 3D geometries.
     */
    @Debug
    @Override
    public double[] getAllCoordinates() {
        final Coordinate[] points = geometry.getCoordinates();
        final var coordinates = new double[points.length * Factory.BIDIMENSIONAL];
        int i = 0;
        for (final Coordinate p : points) {
            coordinates[i++] = p.x;
            coordinates[i++] = p.y;
        }
        return coordinates;
    }

    /**
     * Merges a sequence of points or paths after the wrapped geometry.
     * A point having NaN coordinate values acts as a separator: it closes the
     * polyline under construction and starts a new one.
     *
     * @throws ClassCastException if an element in the iterator is not a JTS geometry.
     */
    @Override
    public Geometry mergePolylines(final Iterator<?> polylines) {
        // `coordinates` accumulates the vertices of the polyline under construction;
        // `lines` collects every completed polyline. `isFloat` tracks whether single
        // precision is sufficient for all vertices accumulated so far.
        final var coordinates = new ArrayList<Coordinate>();
        final var lines = new ArrayList<Geometry>();
        boolean isFloat = true;
add:    for (Geometry next = geometry;;) {
            if (next instanceof Point) {
                final Coordinate pt = ((Point) next).getCoordinate();
                if (!Double.isNaN(pt.x) && !Double.isNaN(pt.y)) {
                    isFloat = Factory.isFloat(isFloat, (Point) next);
                    coordinates.add(pt);
                } else {
                    // NaN point: flush the polyline under construction and start a new one.
                    Factory.INSTANCE.addPolyline(coordinates, lines, false, isFloat);
                    coordinates.clear();
                    isFloat = true;
                }
            } else {
                final int n = next.getNumGeometries();
                for (int i=0; i<n; i++) {
                    final var ls = (LineString) next.getGeometryN(i);
                    if (coordinates.isEmpty()) {
                        // No pending vertices: the line string can be added as-is.
                        lines.add(ls);
                    } else {
                        // Pending vertices: append this line string's vertices to them,
                        // then flush the whole sequence as one polyline.
                        if (isFloat) isFloat = Factory.isFloat(ls.getCoordinateSequence());
                        coordinates.addAll(Arrays.asList(ls.getCoordinates()));
                        Factory.INSTANCE.addPolyline(coordinates, lines, false, isFloat);
                        coordinates.clear();
                        isFloat = true;
                    }
                }
            }
            /*
             * `polylines.hasNext()` check is conceptually part of `for` instruction,
             * except that we need to skip this condition during the first iteration.
             * The inner `do … while` also skips null elements from the iterator.
             */
            do if (!polylines.hasNext()) break add;
            while ((next = (Geometry) polylines.next()) == null);
        }
        // Flush any vertices still pending after the iterator is exhausted.
        Factory.INSTANCE.addPolyline(coordinates, lines, false, isFloat);
        return Factory.INSTANCE.groupPolylines(lines, false, isFloat);
    }

    /**
     * Applies a filter predicate between this geometry and another geometry.
     * This method assumes that the two geometries are in the same CRS (this is not verified).
     *
     * <p><b>Note:</b> {@link SpatialOperatorName#BBOX} is implemented by {@code NOT DISJOINT}.
     * It is caller's responsibility to ensure that one of the geometries is rectangular,
     * for example by a call to {@link Geometry#getEnvelope()}.</p>
     *
     * @throws ClassCastException if the given wrapper is not for the same geometry library.
     */
    @Override
    protected boolean predicateSameCRS(final SpatialOperatorName type, final GeometryWrapper other) {
        // Operations not in the PREDICATES table fall back on the default implementation.
        final int ordinal = type.ordinal();
        if (ordinal >= 0 && ordinal < PREDICATES.length) {
            final BiPredicate<Geometry,Geometry> op = PREDICATES[ordinal];
            if (op != null) {
                return op.test(geometry, ((Wrapper) other).geometry);
            }
        }
        return super.predicateSameCRS(type, other);
    }

    /**
     * All predicates recognized by {@link #predicate(SpatialOperatorName, Geometry)}.
     * Array indices are {@link SpatialOperatorName#ordinal()} values.
     */
    @SuppressWarnings({"unchecked","rawtypes"})
    private static final BiPredicate<Geometry,Geometry>[] PREDICATES =
            new BiPredicate[SpatialOperatorName.OVERLAPS.ordinal() + 1];
    static {
        PREDICATES[SpatialOperatorName.BBOX      .ordinal()] = (a,b) -> !a.disjoint(b);
        PREDICATES[SpatialOperatorName.EQUALS    .ordinal()] = Geometry::equalsTopo;
        PREDICATES[SpatialOperatorName.DISJOINT  .ordinal()] = Geometry::disjoint;
        PREDICATES[SpatialOperatorName.INTERSECTS.ordinal()] = Geometry::intersects;
        PREDICATES[SpatialOperatorName.TOUCHES   .ordinal()] = Geometry::touches;
        PREDICATES[SpatialOperatorName.CROSSES   .ordinal()] = Geometry::crosses;
        PREDICATES[SpatialOperatorName.WITHIN    .ordinal()] = Geometry::within;
        PREDICATES[SpatialOperatorName.CONTAINS  .ordinal()] = Geometry::contains;
        PREDICATES[SpatialOperatorName.OVERLAPS  .ordinal()] = Geometry::overlaps;
    }

    /**
     * Applies a filter predicate between this geometry and another geometry within a given distance.
     * This method assumes that the two geometries are in the same CRS and that the unit of measurement
     * is the same for {@code distance} than for axes (this is not verified).
     *
     * @throws ClassCastException if the given wrapper is not for the same geometry library.
     */
    @Override
    protected boolean predicateSameCRS(final DistanceOperatorName type,
            final GeometryWrapper other, final double distance)
    {
        // Only WITHIN and BEYOND are handled here; any other type is delegated to the parent.
        // BEYOND is the logical negation of WITHIN, expressed by the XOR with `reverse` below.
        boolean reverse = (type != DistanceOperatorName.WITHIN);
        if (reverse && type != DistanceOperatorName.BEYOND) {
            return super.predicateSameCRS(type, other, distance);
        }
        return geometry.isWithinDistance(((Wrapper) other).geometry, distance) ^ reverse;
    }

    /**
     * Applies a SQLMM operation on this geometry.
     *
     * @param  operation  the SQLMM operation to apply.
     * @param  other      the other geometry, or {@code null} if the operation requires only one geometry.
     * @param  argument   an operation-specific argument, or {@code null} if not applicable.
     * @return result of the specified operation.
     * @throws ClassCastException if the operation can only be executed on some specific argument types
     *         (for example geometries that are polylines) and one of the argument is not of that type.
     */
    @Override
    protected Object operationSameCRS(final SQLMM operation, final GeometryWrapper other, final Object argument) {
        /*
         * For all operation producing a geometry, the result is collected for post-processing.
         * For all other kinds of value, the result is returned directly in the switch statement.
         */
        final Geometry result;
        switch (operation) {
            case ST_IsMeasured:       return Boolean.FALSE;
            case ST_Dimension:        return geometry.getDimension();
            case ST_SRID:             return geometry.getSRID();
            case ST_IsEmpty:          return geometry.isEmpty();
            case ST_IsSimple:         return geometry.isSimple();
            case ST_IsValid:          return geometry.isValid();
            case ST_Envelope:         return getEnvelope();
            case ST_Boundary:         result = geometry.getBoundary(); break;
            case ST_ConvexHull:       result = geometry.convexHull(); break;
            case ST_Buffer:           result = geometry.buffer(((Number) argument).doubleValue()); break;
            case ST_Intersection:     result = geometry.intersection (((Wrapper) other).geometry); break;
            case ST_Union:            result = geometry.union        (((Wrapper) other).geometry); break;
            case ST_Difference:       result = geometry.difference   (((Wrapper) other).geometry); break;
            case ST_SymDifference:    result = geometry.symDifference(((Wrapper) other).geometry); break;
            case ST_Distance:         return geometry.distance       (((Wrapper) other).geometry);
            case ST_Equals:           return geometry.equalsTopo     (((Wrapper) other).geometry);
            case ST_Relate:           return geometry.relate         (((Wrapper) other).geometry, argument.toString());
            case ST_Disjoint:         return geometry.disjoint       (((Wrapper) other).geometry);
            case ST_Intersects:       return geometry.intersects     (((Wrapper) other).geometry);
            case ST_Touches:          return geometry.touches        (((Wrapper) other).geometry);
            case ST_Crosses:          return geometry.crosses        (((Wrapper) other).geometry);
            case ST_Within:           return geometry.within         (((Wrapper) other).geometry);
            case ST_Contains:         return geometry.contains       (((Wrapper) other).geometry);
            case ST_Overlaps:         return geometry.overlaps       (((Wrapper) other).geometry);
            case ST_AsText:           return new WKTWriter().write(geometry);      // WKTWriter() constructor is cheap.
            case ST_AsBinary:         return FilteringContext.writeWKB(geometry);
            case ST_X:                return ((Point) geometry).getX();
            case ST_Y:                return ((Point) geometry).getY();
            case ST_Z:                return ((Point) geometry).getCoordinate().getZ();
            case ST_ToLineString:     return geometry;                              // JTS does not have curves.
            case ST_NumGeometries:    return geometry.getNumGeometries();
            case ST_NumPoints:        return geometry.getNumPoints();
            case ST_PointN:           result = ((LineString) geometry).getPointN(toIndex(argument)); break;
            case ST_StartPoint:       result = ((LineString) geometry).getStartPoint(); break;
            case ST_EndPoint:         result = ((LineString) geometry).getEndPoint(); break;
            case ST_IsClosed:         return ((LineString) geometry).isClosed();
            case ST_IsRing:           return ((LineString) geometry).isRing();
            case ST_Perimeter:        // Fallthrough: length is the perimeter for polygons.
            case ST_Length:           return geometry.getLength();
            case ST_Area:             return geometry.getArea();
            case ST_Centroid:         result = geometry.getCentroid(); break;
            case ST_PointOnSurface:   result = geometry.getInteriorPoint(); break;
            case ST_ExteriorRing:     result = ((Polygon) geometry).getExteriorRing(); break;
            case ST_InteriorRingN:    result = ((Polygon) geometry).getInteriorRingN(toIndex(argument)); break;
            case ST_NumInteriorRings: return ((Polygon) geometry).getNumInteriorRing();
            case ST_GeometryN:        result = geometry.getGeometryN(toIndex(argument)); break;
            case ST_ToPoint:
            case ST_ToPolygon:
            case ST_ToMultiPoint:
            case ST_ToMultiLine:
            case ST_ToMultiPolygon:
            case ST_ToGeomColl: {
                // Conversion is a no-op when the geometry is already of the target type.
                final GeometryType target = operation.getGeometryType().get();
                final Class<?> type = getGeometryClass(target);
                if (type.isInstance(geometry)) {
                    return geometry;
                }
                result = convert(target);
                break;
            }
            case ST_Is3D: {
                final Coordinate c = geometry.getCoordinate();
                return (c != null) ? !Double.isNaN(c.z) : null;
            }
            case ST_CoordDim: {
                final Coordinate c = geometry.getCoordinate();
                return (c != null) ? Double.isNaN(c.z) ? Geometries.BIDIMENSIONAL : Geometries.TRIDIMENSIONAL : null;
            }
            case ST_GeometryType: {
                // TYPES is ordered from most specific to most generic, so the first match wins.
                for (int i=0; i < TYPES.length; i++) {
                    if (TYPES[i].isInstance(geometry)) {
                        return SQLMM_NAMES[i];
                    }
                }
                return null;
            }
            case ST_ExplicitPoint: {
                final Coordinate c = ((Point) geometry).getCoordinate();
                if (c == null) return ArraysExt.EMPTY_DOUBLE;       // Empty point.
                final double x = c.getX();
                final double y = c.getY();
                final double z = c.getZ();
                return Double.isNaN(z) ? new double[] {x, y} : new double[] {x, y, z};
            }
            case ST_Simplify: {
                final double distance = ((Number) argument).doubleValue();
                result = DouglasPeuckerSimplifier.simplify(geometry, distance);
                break;
            }
            case ST_SimplifyPreserveTopology: {
                final double distance = ((Number) argument).doubleValue();
                result = TopologyPreservingSimplifier.simplify(geometry, distance);
                break;
            }
            default: return super.operationSameCRS(operation, other, argument);
        }
        // Every geometric result goes through here: propagate CRS/SRID metadata.
        JTS.copyMetadata(geometry, result);
        return result;
    }

    /**
     * The types of JTS objects to be recognized by the SQLMM {@code ST_GeometryType} operation.
     * Order matters: entries are tested sequentially, so subclasses come before superclasses.
     */
    private static final Class<?>[] TYPES = {
        Point.class, LineString.class, Polygon.class, MultiPoint.class,
        MultiLineString.class, MultiPolygon.class, GeometryCollection.class, Geometry.class,
    };

    /**
     * The SQLMM names for the types listed in the {@link #TYPES} array.
     * Indices are parallel to {@link #TYPES}.
     */
    private static final String[] SQLMM_NAMES = {
        "ST_Point", "ST_LineString", "ST_Polygon", "ST_MultiPoint",
        "ST_MultiLineString", "ST_MultiPolygon", "ST_GeomCollection", "ST_Geometry"
    };

    /**
     * Converts the given argument to a zero-based index.
     * SQLMM indices are one-based, hence the {@code i - 1} below.
     *
     * @throws ClassCastException if the argument is not a string or a number.
     * @throws NumberFormatException if the argument is an unparseable string.
     * @throws IllegalArgumentException if the argument is zero or negative.
     */
    private static int toIndex(final Object argument) {
        final int i = (argument instanceof CharSequence)
                ? Integer.parseInt(argument.toString())
                : ((Number) argument).intValue();    // ClassCastException is part of this method contract.
        ArgumentChecks.ensureStrictlyPositive("index", i);
        return i - 1;
    }

    /**
     * Converts the wrapped geometry to the specified type.
     * If the geometry is already of that type, it is returned unchanged.
     * Otherwise coordinates are copied in a new geometry of the requested type.
     *
     * <p>The following conversions are illegal and will cause an {@link IllegalArgumentException} to be thrown:</p>
     * <ul>
     *   <li>From point to polyline or polygon (exception thrown by JTS itself).</li>
     *   <li>From geometry collection (except multi-point) to polyline.</li>
     *   <li>From geometry collection (except multi-point and multi-line string) to polygon.</li>
     *   <li>From geometry collection containing nested collections.</li>
     * </ul>
     *
     * The conversion from {@link MultiLineString} to {@link Polygon} is defined as following:
     * the first {@link LineString} is taken as the exterior {@link LinearRing} and all others
     * {@link LineString}s are interior {@link LinearRing}s.
     * This rule is defined by some SQLMM operations.
     *
     * @param  target  the desired type.
     * @return the converted geometry.
     * @throws IllegalArgumentException if the geometry cannot be converted to the specified type.
     */
    @Override
    public GeometryWrapper toGeometryType(final GeometryType target) {
        if (!getGeometryClass(target).isInstance(geometry)) {
            final Geometry result = convert(target);
            if (result != geometry) {
                return new Wrapper(this, result);
            }
        }
        return this;
    }

    /**
     * Mapping from implementation-neutral geometry types to their JTS classes.
     */
    private static final EnumMap<GeometryType, Class<?>> GEOMETRY_CLASSES = new EnumMap<>(GeometryType.class);
    static {
        final var m = GEOMETRY_CLASSES;
        /*
         * Following types are intentionally omitted:
         *   - GEOMETRY because it is the first enumeration value, which is too early for `isAssignableFrom(Class)` checks.
         *   - TRIANGLE because it is not a geometry in JTS class hierarchy.
         */
        m.put(GeometryType.POINT,              Point.class);
        m.put(GeometryType.LINESTRING,         LineString.class);
        m.put(GeometryType.POLYGON,            Polygon.class);
        m.put(GeometryType.MULTIPOINT,         MultiPoint.class);
        m.put(GeometryType.MULTILINESTRING,    MultiLineString.class);
        m.put(GeometryType.MULTIPOLYGON,       MultiPolygon.class);
        m.put(GeometryType.GEOMETRYCOLLECTION, GeometryCollection.class);
        // If other collection types are added, verify if they are enumerated before `GEOMETRYCOLLECTION`.
    }

    /**
     * Returns the JTS geometry class for the given implementation-neutral type.
     * If the given type has no specific JTS classes, then the generic {@link Geometry} is returned.
     *
     * @param  type  type of geometry for which the class is desired.
     * @return implementation class for the geometry of the specified type.
     */
    static Class<?> getGeometryClass(final GeometryType type) {
        return GEOMETRY_CLASSES.getOrDefault(type, Geometry.class);
    }

    /**
     * Returns the implementation-neutral type for the given JTS class or array class.
     * If the given class is not recognized, then {@link GeometryType#GEOMETRY} is returned.
     * Note: {@code EnumMap} iterates in the natural order of its enum keys, so the most
     * specific entries declared first in {@code GeometryType} are tested first.
     *
     * @param  type  the JTS class, or an array type having a JTS class as components.
     * @return the implementation-neutral type for the given class, or {@code GEOMETRY} if not recognized.
     */
    static GeometryType getGeometryType(final Class<?> type) {
        Class<?> componentType = type.getComponentType();
        if (componentType == null) componentType = type;
        for (final EnumMap.Entry<GeometryType, Class<?>> entry : GEOMETRY_CLASSES.entrySet()) {
            if (entry.getValue().isAssignableFrom(componentType)) {
                final GeometryType gt = entry.getKey();
                if (!gt.isCollection && componentType != type) {
                    var ct = gt.collection();       // If the given type was an array, return a geometry collection type.
                    if (ct != null) return ct;
                }
                return gt;
            }
        }
        return GeometryType.GEOMETRY;
    }

    /**
     * Converts the given geometry to the specified type without wrapper.
     * This is the implementation of {@link #toGeometryType(GeometryType)}.
     * Caller should invoke {@link JTS#copyMetadata(Geometry, Geometry)} after this method.
     *
     * @param  target  the desired type.
     * @return the converted geometry.
     * @throws IllegalArgumentException if the geometry cannot be converted to the specified type.
     */
    private Geometry convert(final GeometryType target) {
        final GeometryFactory factory = geometry.getFactory();
        switch (target) {
            case POINT: {
                return geometry.getCentroid();
            }
            case LINESTRING: {
                if (isCollection(geometry)) break;      // Cannot flatten a multi-part geometry to one line.
                return factory.createLineString(geometry.getCoordinates());
            }
            case POLYGON: {
                if (!geometry.isEmpty() && geometry instanceof MultiLineString) {
                    // SQLMM `ST_BdMPolyFromText` and `ST_BdMPolyFromWKB` behavior.
                    final MultiLineString lines = (MultiLineString) geometry;
                    final LinearRing exterior = factory.createLinearRing(lines.getGeometryN(0).getCoordinates());
                    final LinearRing[] interiors = new LinearRing[lines.getNumGeometries() - 1];
                    for (int i=0; i < interiors.length;) {
                        // `interiors[i]` uses the value of `i` before the `++i` on the right side:
                        // interiors[0] is built from line string #1, skipping the exterior at #0.
                        interiors[i] = factory.createLinearRing(lines.getGeometryN(++i).getCoordinates());
                    }
                    return factory.createPolygon(exterior, interiors);
                }
                if (isCollection(geometry)) break;
                return factory.createPolygon(geometry.getCoordinates());
            }
            case MULTIPOINT: {
                return (geometry instanceof Point)
                        ? factory.createMultiPoint(new Point[] {(Point) geometry})
                        : factory.createMultiPointFromCoords(geometry.getCoordinates());
            }
            case MULTILINESTRING: {
                return toCollection(factory, LineString.class, LineString[]::new,
                        GeometryFactory::createLineString, GeometryFactory::createMultiLineString);
            }
            case MULTIPOLYGON: {
                return toCollection(factory, Polygon.class, Polygon[]::new,
                        GeometryFactory::createPolygon, GeometryFactory::createMultiPolygon);
            }
            case GEOMETRYCOLLECTION: {
                // A single geometry is wrapped in the corresponding homogeneous collection type.
                if (geometry instanceof Point) {
                    return factory.createMultiPoint(new Point[] {(Point) geometry});
                } else if (geometry instanceof LineString) {
                    return factory.createMultiLineString(new LineString[] {(LineString) geometry});
                } else if (geometry instanceof Polygon) {
                    return factory.createMultiPolygon(new Polygon[] {(Polygon) geometry});
                }
                break;
            }
        }
        // Reached by every unsupported conversion (the `break` statements above).
        throw new UnconvertibleObjectException(Errors.format(Errors.Keys.CanNotConvertFromType_2,
                geometry.getClass(), getGeometryClass(target)));
    }

    /**
     * Converts a single geometry or a geometry collection to a collection of another type.
     * This is a helper method for {@link #toGeometryType(GeometryType)}.
     *
     * @param  <T>            the compile-time value of {@code type}.
     * @param  factory        the factory to use for creating new geometries.
     * @param  type           the type of geometry components to put in a collection.
     * @param  newArray       constructor for a new array of given {@code type}.
     * @param  newComponent   constructor for a geometry component of given {@code type}.
     * @param  newCollection  constructor for a geometry collection from an array of components/
     * @return the geometry collection created from the given type.
     * @throws IllegalArgumentException if a geometry collection contains nested collection.
     */
    private <T extends Geometry> GeometryCollection toCollection(
            final GeometryFactory factory, final Class<T> type, final IntFunction<T[]> newArray,
            final BiFunction<GeometryFactory,Coordinate[],T> newComponent,
            final BiFunction<GeometryFactory,T[],GeometryCollection> newCollection)
    {
        final T[] components = newArray.apply(geometry.getNumGeometries());
        for (int i=0; i<components.length; i++) {
            final Geometry c = geometry.getGeometryN(i);
            if (type.isInstance(c)) {
                components[i] = type.cast(c);       // Already of the requested component type.
            } else if (isCollection(c)) {
                throw new IllegalArgumentException(Errors.format(Errors.Keys.NestedElementNotAllowed_1, GeometryCollection.class));
            } else {
                // Rebuild the component from its coordinates (e.g. point -> degenerate line string).
                components[i] = newComponent.apply(factory, c.getCoordinates());
            }
        }
        return newCollection.apply(factory, components);
    }

    /**
     * Returns {@code true} if the given geometry is a collection other than {@link MultiPoint}.
     * Collections are handled recursively by {@code getLineStrings(…)} and {@code getPolygons(…)}.
     */
    private static boolean isCollection(final Geometry geometry) {
        return (geometry.getNumGeometries() >= 2) && !(geometry instanceof MultiPoint);
    }

    /**
     * Transforms this geometry using the given coordinate operation.
     * If the operation is {@code null}, then the geometry is returned unchanged.
     * If the geometry uses a different CRS than the source CRS of the given operation
     * and {@code validate} is {@code true},
     * then a new operation to the target CRS will be automatically computed.
     *
     * @param  operation  the coordinate operation to apply, or {@code null}.
     * @param  validate   whether to validate the operation source CRS.
     * @throws FactoryException if transformation to the target CRS cannot be found.
     * @throws TransformException if the geometry cannot be transformed.
     */
    @Override
    public GeometryWrapper transform(final CoordinateOperation operation, final boolean validate)
            throws FactoryException, TransformException
    {
        return rewrap(JTS.transform(geometry, operation, validate));
    }

    /**
     * Transforms this geometry to the specified Coordinate Reference System (CRS).
     * If the given CRS is null or is the same CRS as current one, the geometry is returned unchanged.
     * If the geometry has no Coordinate Reference System, then the geometry is returned unchanged.
     *
     * @param  targetCRS  the target coordinate reference system, or {@code null}.
     * @return the transformed geometry (may be the same geometry instance), or {@code null}.
     * @throws TransformException if this geometry cannot be transformed.
     */
    @Override
    public GeometryWrapper transform(final CoordinateReferenceSystem targetCRS) throws TransformException {
        try {
            return rewrap(JTS.transform(geometry, targetCRS));
        } catch (FactoryException e) {
            /*
             * We wrap that exception because `Geometry.transform(…)` does not declare `FactoryException`.
             * We may revisit in a future version if `Geometry.transform(…)` method declaration is updated.
             */
            throw new TransformException(e.getMessage(), e);
        }
    }

    /**
     * Transforms this geometry using the given transform.
     * If the transform is {@code null}, then the geometry is returned unchanged.
     * Otherwise, a new geometry is returned without CRS.
     *
     * @param  transform  the math transform to apply, or {@code null}.
     * @return the transformed geometry (may be the same geometry instance, but never {@code null}).
     * @throws TransformException if the geometry cannot be transformed.
     */
    @Override
    public GeometryWrapper transform(final MathTransform transform) throws FactoryException, TransformException {
        return rewrap(JTS.transform(geometry, transform));
    }

    /**
     * Returns the given geometry in a new wrapper, or {@code this} if {@code result} is same as current geometry.
     * If a new wrapper is created, then its <abbr>CRS</abbr> will be inferred from the geometry <abbr>SRID</abbr>
     * or user properties.
     *
     * @param  result  the geometry computed by a JTS operation.
     * @return wrapper for the given geometry. May be {@code this}.
     * @throws FactoryException if the <abbr>CRS</abbr> cannot be created from the <abbr>SRID</abbr> code.
     */
    private Wrapper rewrap(final Geometry result) throws FactoryException {
        return (result == geometry) ? this : new Wrapper(result);
    }

    /**
     * Returns a view over the JTS geometry as a Java2D shape. Changes in the JTS geometry
     * after this method call may be reflected in the returned shape in an unspecified way.
     *
     * @return a view over the geometry as a Java2D shape.
     */
    @Override
    public Shape toJava2D() {
        return JTS.asShape(geometry);
    }

    /**
     * Returns the WKT representation of the wrapped geometry.
     * Note: the {@code flatness} argument is not used by this implementation.
     */
    @Override
    public String formatWKT(final double flatness) {
        return geometry.toText();
    }
}
oracle/nosql
37,943
kvmain/src/main/java/oracle/nosql/common/kv/drl/LimiterManager.java
/*- * Copyright (C) 2011, 2021 Oracle and/or its affiliates. All rights reserved. * * This file was distributed by Oracle as part of a version of Oracle NoSQL * Database made available at: * * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html * * Please see the LICENSE file included in the top-level directory of the * appropriate version of Oracle NoSQL Database for a copy of the license and * additional information. */ package oracle.nosql.common.kv.drl; import static oracle.nosql.nson.util.PackedInteger.readSortedInt; import static oracle.nosql.nson.util.PackedInteger.getReadSortedIntLength; import static oracle.nosql.nson.util.PackedInteger.readSortedLong; import static oracle.nosql.nson.util.PackedInteger.getReadSortedLongLength; import static oracle.nosql.nson.util.PackedInteger.writeSortedInt; import static oracle.nosql.nson.util.PackedInteger.writeSortedLong; import java.lang.Runnable; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import oracle.kv.Consistency; import oracle.kv.Durability; import oracle.kv.ReadThroughputException; import oracle.kv.StatementResult; import oracle.kv.Version; import oracle.kv.WriteThroughputException; import oracle.kv.table.FieldValue; import oracle.kv.table.PrimaryKey; import oracle.kv.table.ReadOptions; import oracle.kv.table.Row; import oracle.kv.table.WriteOptions; import oracle.kv.impl.api.KVStoreImpl; import oracle.kv.impl.api.table.TableAPIImpl; import oracle.kv.impl.api.table.TableImpl; import oracle.kv.impl.api.table.TableLimits; import oracle.nosql.common.contextlogger.LogContext; import oracle.nosql.common.ratelimit.SimpleRateLimiter; import oracle.nosql.common.sklogger.SkLogger; /** * A class to 
manage rate limiting and delaying responses */ public class LimiterManager { /* one for each DRL KVStore */ private static class DRLTable { TableImpl table; Version lastVersion; } /* map of storeName->DRLTable for backing stores */ private HashMap<String, DRLTable> drlTablesMap; /* map of storeName->KVStoreImpl for backing stores */ private HashMap<String, KVStoreImpl> storeMap; private final SkLogger logger; public static class Config { /* use distributed mode: synchronize to backing store */ public boolean useDistributed; /* backing store table name (required) */ public String drlTableName; /* backing store namespace (optional) */ public String drlNamespace; /* desired DRL update interval */ public long drlUpdateIntervalMs; /* minimum update interval */ public long drlMinUpdateIntervalMs; /* use N milliseconds of limits "credit" from the past before limiting */ public int limitCreditMs; /* * rateFactor allows us to tweak the resultant throughput in case * the logic ends up being too strict or too weak. For example, if * most tables are not getting their full allocated throughput, we can * set this value higher (to, say, 1.1 from default 1.0). 
*/ public double rateFactor; /* size of the delay pool (max number of concurrent threads) */ public int delayPoolSize; /* sets reasonable defaults */ public Config() { useDistributed = false; drlTableName = "drlTable"; drlNamespace = null; drlUpdateIntervalMs = 200; /* 5 times per second */ drlMinUpdateIntervalMs = 20; limitCreditMs = 5000; /* five seconds */ rateFactor = 1.0; delayPoolSize = 5; } void copyFrom(Config other) { this.useDistributed = other.useDistributed; this.drlTableName = other.drlTableName; this.drlNamespace = other.drlNamespace; this.drlUpdateIntervalMs = other.drlUpdateIntervalMs; this.drlMinUpdateIntervalMs = other.drlMinUpdateIntervalMs; this.limitCreditMs = other.limitCreditMs; this.rateFactor = other.rateFactor; this.delayPoolSize = other.delayPoolSize; } } private Config config; /* main map of rate limiters */ private HashMap<LimiterMapKey, LimiterMapEntry> limiterMap; /* thread pool for managing delayed responses */ private ScheduledThreadPoolExecutor delayPool; /* max number of queued items for delay pool */ private final static int MAX_DELAY_QUEUE_SIZE = 1500; /* start time of most recent execution */ private long executionStart; private Random rand; /* DRL service ID: randomly generated */ private long serviceID; /* one timestamp for each of the other services */ private HashMap<String, Long> lastReads; /* Simple stats */ public static class Stats { /* raw number of delays due to limiting */ public long delayedResponses; /* raw milliseconds delayed */ public long delayedMillis; /* number of internal store errors */ public long storeErrors; /* number of tables active */ public int activeTables; /* number of tables that have gone over their limits */ public int overLimitTables; } /* stats accumulated since last collection */ private Stats curstats; /* used when reading DRL records from the store(s) */ private ReadOptions DRLReadOptions; /* used when writing DRL records to the store(s) */ private WriteOptions DRLWriteOptions; /** * Create 
and start a new Limiter Manager. * * This implements the core rate limiting logic: * - A RateLimiter is created for each unique tenant/table/op (r/w). * - after each operation, this manager is called with the consumed * units. If adding the units to the limiter throws it over the * limit, the response will be delayed by the proper amount of time * to bring the limiter back under its limit. * * @param logger Logger instance to use for messages * @param config configuration parameters. Copied into internal object. */ public LimiterManager(SkLogger logger, Config config) { this.logger = logger; this.config = new Config(); this.config.copyFrom(config); // TODO this.config.validate(); /* main map */ limiterMap = new HashMap<LimiterMapKey, LimiterMapEntry>(100); /* drl tables map (backing stores) */ drlTablesMap = new HashMap<String, DRLTable>(10); /* store map (backing stores) */ storeMap = new HashMap<String, KVStoreImpl>(10); delayPool = new ScheduledThreadPoolExecutor(config.delayPoolSize); rand = new Random(); curstats = new Stats(); if (config.useDistributed == true && config.drlUpdateIntervalMs > 0) { // generate unique ID this.serviceID = rand.nextLong(); this.lastReads = new HashMap<String, Long>(); /* start a task to update distributed limiter values */ try { delayPool.scheduleWithFixedDelay(() -> { try { this.writeAndReadDataToStores(); this.delayNextExecution(); } catch (Exception e) { info("Error: Could not update DRL " + "backing store: " + e); curstats.storeErrors++; } }, 100, config.drlMinUpdateIntervalMs, TimeUnit.MILLISECONDS); info("Started distributed updater: interval=" + config.drlUpdateIntervalMs + "ms"); } catch (RejectedExecutionException ree) { /* TODO: what now? should this be fatal? 
*/ logger.severe("Can't start DRL backing store updater: " + ree); } } else { /* set up a simple timer to clear old entries */ try { delayPool.scheduleAtFixedRate(() -> {this.clearOldEntries();}, 0, 1000, TimeUnit.MILLISECONDS); info("Started DRL in non-distributed mode"); } catch (RejectedExecutionException ree) { /* TODO: what now? should this be fatal? */ logger.severe("Can't start DRL limiter map cleaner: " + ree); } } DRLReadOptions = new ReadOptions(Consistency.ABSOLUTE, 50, TimeUnit.MILLISECONDS); DRLWriteOptions = new WriteOptions(Durability.COMMIT_NO_SYNC, 50, TimeUnit.MILLISECONDS); info("Rate limiting enabled: rateFactor=" + config.rateFactor + " limitCreditMs=" + config.limitCreditMs); } /** * Collect and reset internal stats. * * @param stats stats object to fill with current stats */ public void collectStats(Stats stats) { if (stats != null && limiterMap != null) { stats.activeTables = limiterMap.size(); stats.overLimitTables = 0; Set<LimiterMapKey> keys = limiterMap.keySet(); for (LimiterMapKey lmKey : keys) { LimiterMapEntry lmEntry = limiterMap.get(lmKey); if (lmEntry == null) { continue; } if (lmEntry.overLimit) { lmEntry.overLimit = false; stats.overLimitTables++; } } } if (stats != null) { stats.delayedResponses = curstats.delayedResponses; stats.delayedMillis = curstats.delayedMillis; stats.storeErrors = curstats.storeErrors; fine("active=" + stats.activeTables + " overLimit=" + stats.overLimitTables + " delayed=" + stats.delayedResponses + " (" + stats.delayedMillis + "ms)" + " errors=" + stats.storeErrors); } curstats.delayedResponses = 0; curstats.delayedMillis = 0; curstats.storeErrors = 0; } private TableImpl createDRLTable(KVStoreImpl store, String storeName) { final String tName; if (config.drlNamespace != null) { tName = config.drlNamespace + ":" + config.drlTableName; } else { tName = config.drlTableName; } String statement = "CREATE TABLE IF NOT EXISTS " + tName + "(id LONG, sid INTEGER, data BINARY, " + "PRIMARY KEY(SHARD(sid), id)) 
" + "USING TTL 1 hours"; try { info("creating table '" + tName + "' in store " + storeName); StatementResult res = store.executeSync(statement); if (! res.isSuccessful()) { info("Error: could not create table '" + tName + "' in store " + storeName + ": " + res.getErrorMessage()); curstats.storeErrors++; return null; } TableAPIImpl tableApi = store.getTableAPIImpl(); return (TableImpl)tableApi.getTable(config.drlNamespace, config.drlTableName, true /* bypassCache */); } catch (Exception e) { info("Error: trying to create table '" + tName + "' in store " + storeName + ": " + e.getMessage()); curstats.storeErrors++; return null; } } private synchronized DRLTable getDRLTableForStore(String storeName, KVStoreImpl store) { DRLTable drlTable = drlTablesMap.get(storeName); if (drlTable != null) { return drlTable; } TableAPIImpl tableApi = store.getTableAPIImpl(); TableImpl table = (TableImpl) tableApi.getTable(config.drlNamespace, config.drlTableName, true /* bypassCache */); if (table == null) { // try creating table table = createDRLTable(store, storeName); } if (table == null) { info("could not get DRL table instance for store name '" + storeName + "'"); return null; } drlTable = new DRLTable(); drlTable.table = table; drlTable.lastVersion = null; drlTablesMap.put(storeName, drlTable); return drlTable; } private void writeBackingStoreEntry(String storeName, byte[] buf) { if (buf == null) { return; } /* * Write a row into the store, using serviceID as the * id and the entry as the binary value. * Use PutIfVersion with previous version to detect if * multiple services are mistakenly using the same id. * * TODO: would be nice if we had a way to not have this record * be replicated or persisted to disk in the kvstore. 
*/ DRLTable drlTable = null; try { KVStoreImpl store = storeMap.get(storeName); if (store == null) { info("Error: could not get store instance for store name '" + storeName + "'"); curstats.storeErrors++; return; } drlTable = drlTablesMap.get(storeName); if (drlTable == null) { drlTable = getDRLTableForStore(storeName, store); } if (drlTable == null) { return; } // TODO: pre-create Row (save garbage) Row row = drlTable.table.createRow(); row.put("id", serviceID); row.put("sid", 1); // fixed shard key row.put("data", buf); TableAPIImpl api = store.getTableAPIImpl(); if (drlTable.lastVersion == null) { drlTable.lastVersion = api.put(row, null, DRLWriteOptions); } else { drlTable.lastVersion = api.putIfVersion(row, drlTable.lastVersion, null, DRLWriteOptions); if (drlTable.lastVersion == null) { info("Error: collision in DRL key for store " + storeName + ": id=" + serviceID); serviceID = rand.nextLong(); info("Generated new serviceID=" + serviceID); curstats.storeErrors++; } } } catch (Exception e) { info("Error: writing rate limiter data to backing store " + storeName + ": " + e); if (drlTable != null) { drlTable.lastVersion = null; } curstats.storeErrors++; } } /** * Read entries from the backing store. 
* @param storeName the name of the store to use * @return true if we found a valid entry * false otherwise */ private boolean readBackingStoreEntries(String storeName) { List<Row> rows = null; try { KVStoreImpl store = storeMap.get(storeName); if (store == null) { info("Error: could not get store instance for store name '" + storeName + "'"); curstats.storeErrors++; return false; } DRLTable drlTable = drlTablesMap.get(storeName); if (drlTable == null) { drlTable = getDRLTableForStore(storeName, store); } if (drlTable == null) { return false; } /* * Read all rows in a single multiGet operation using * single shard key */ PrimaryKey key = drlTable.table.createPrimaryKey(); key.put("sid", 1); // fixed shard key TableAPIImpl api = store.getTableAPIImpl(); fine("reading entries from backing store " + storeName); rows = api.multiGet(key, null, DRLReadOptions); } catch (Exception e) { info("Error: reading rate limiter entries from backing store " + storeName + ": " + e); curstats.storeErrors++; return false; } /* it's possible there are no records */ if (rows == null) { fine("no entries found for store " + storeName); return false; } boolean foundValidEntry = false; for (Row row : rows) { byte[] buf = null; long id = 0; try { FieldValue idfv = row.get("id"); if (idfv == null) { fine("Error: no 'id' found in entry for store " + storeName); curstats.storeErrors++; continue; } id = idfv.asLong().get(); if (id == serviceID) { fine("Skipping own entry for store " + storeName); continue; } FieldValue datafv = row.get("data"); if (datafv == null) { fine("Error: no 'data' found in id=" + (id % 10000) + " entry for store " + storeName); curstats.storeErrors++; continue; } buf = datafv.asBinary().get(); } catch (Exception e) { info("Error: could not get binary limiter data from store " + storeName + ": " + e); curstats.storeErrors++; continue; } if (buf == null || buf.length == 0) { /* empty record, this is possible */ fine("empty 'data' found in entry " + (id % 10000) + " for 
store " + storeName); continue; } if (processBackingStoreEntry(buf, id, storeName)) { foundValidEntry = true; } } return foundValidEntry; } private boolean processBackingStoreEntry(byte[] buf, long id, String storeName) { LimiterMapKey lmKey = new LimiterMapKey(storeName, 0); long now = System.currentTimeMillis(); try { int off = 0; /* the first long is the timestamp this record was written */ long thenMs = readSortedLong(buf, off); off += getReadSortedLongLength(buf, off); /* of this record is over 10 minutes old, skip it */ /* this depends on ntp clock synchronization across services */ if ((thenMs + 600_000) < now) { fine("skipping old record for id=" + (id % 10000) + ": store=" + storeName); return true; /* this is a valid entry */ } /* if we've already read this record, skip it */ fine("time from id=" + (id % 10000) + " record: " + thenMs); String lrkey = storeName + "_" + id; Long val = lastReads.get(lrkey); if (val != null && thenMs <= val.longValue()) { fine("skipping already-read record for id=" + (id % 10000) + ": store=" + storeName); return true; /* this is a valid entry */ } lastReads.put(lrkey, thenMs); long RUs, WUs; while (off < buf.length) { lmKey.tableId = readSortedLong(buf, off); off += getReadSortedLongLength(buf, off); RUs = readSortedInt(buf, off); off += getReadSortedIntLength(buf, off); WUs = readSortedInt(buf, off); off += getReadSortedIntLength(buf, off); if (RUs <= 0 && WUs <= 0) { continue; } LimiterMapEntry lmEntry = limiterMap.get(lmKey); if (lmEntry == null) { /* no need to create? 
We don't have the limits */ fine("skipping id=" + (id % 10000) + " entry for tableId=" + lmKey.tableId + " store=" + storeName); continue; } /* for internal testing, we may look up our own records */ if (id == serviceID) { fine("skipping own entry: tableId=" + lmKey.tableId + " RUs=" + RUs + " WUs=" + WUs); continue; } if (RUs > 0) { lmEntry.readLimiter.consumeExternally(RUs); lmEntry.lastUpdate = now; fine("applied units from id=" + (id % 10000) + ": " + "tableId=" + lmKey.tableId + " RUS=" + RUs + " rate=" + String.format("%.3f", lmEntry.readLimiter.getCurrentRate())); } if (WUs > 0) { lmEntry.writeLimiter.consumeExternally(WUs); lmEntry.lastUpdate = now; fine("applied units from id=" + (id % 10000) + ": " + "tableId=" + lmKey.tableId + " WUS=" + WUs + " rate=" + String.format("%.3f", lmEntry.writeLimiter.getCurrentRate())); } } } catch (Exception e) { info("Error: deserializing rate limiter entry for id=" + (id % 10000) + " store=" + storeName + ": " + e); curstats.storeErrors++; return false; } return true; } /* * Clear old entries from map. This is only used if we are not * doing distributed rate limiting using backing stores. */ private void clearOldEntries() { if (limiterMap == null) { return; } /* * walk limiter map. Clear old entries. */ long now = System.currentTimeMillis(); Iterator<LimiterMapKey> it = limiterMap.keySet().iterator(); while (it.hasNext()) { LimiterMapKey lmKey = it.next(); LimiterMapEntry lmEntry = limiterMap.get(lmKey); if (lmEntry != null && (lmEntry.lastUpdate + 60_000) < now) { fine("Removing limiter entry: tableId=" + lmKey.tableId); it.remove(); } } } /* * Try to keep the backing store updates happening at consistent * intervals. The interval may change based on how much usage the * stores are getting and how many tables are operating close to * or over their limits (close to limits == faster execution). 
*/ private void delayNextExecution() { long now = System.currentTimeMillis(); // TODO: adjust drlUpdateIntervalMs based on usage: if no tables close to // limits, slow down the rate of distributed updates, up to some maximum. long executionTime = now - executionStart; long sleepTime = (config.drlUpdateIntervalMs - executionTime) - config.drlMinUpdateIntervalMs; if (sleepTime <= 0) { fine("Execution falling behind"); return; } try { Thread.sleep(sleepTime); } catch (Throwable t) {} } /* * Update backing store(s) with our information, and read backing * store(s) for other services' information. */ private void writeAndReadDataToStores() { executionStart = System.currentTimeMillis(); if (limiterMap == null || limiterMap.size() == 0) { return; } /* walk limiter map. Get a set of currently used store names. */ Set<String> storeNames = new HashSet<String>(); Set<LimiterMapKey> keys = limiterMap.keySet(); for (LimiterMapKey lmKey : keys) { storeNames.add(lmKey.storeName); } /* * create one max size byte array to avoid more GC. * 19 = max serialized size of tableId(long) + RUs(int) + WUs(int) * 9 = max serialized size of timestamp(long) */ int maxBytes = (limiterMap.size() * 19) + 9; byte[] buf = new byte[maxBytes]; /* * Write records to backing store(s): * Walk each store. Write entries to backing store if there are * any active tables for that store. * * Note: createBackingStoreEntry() also removes any old map elements. */ for (String storeName : storeNames) { byte[] entry = createBackingStoreEntry(storeName, buf); writeBackingStoreEntry(storeName, entry); } /* * Read records from backing store(s): * Walk each store. get records for each other service for each store * and apply each entry to local rate limiters. */ for (String storeName : storeNames) { readBackingStoreEntries(storeName); } } /* * Create the serialized backing store entry for a specific store. 
* Side effect: remove map entries more than 10 minutes old */ private byte[] createBackingStoreEntry(String storeName, byte[] buf) { long now = System.currentTimeMillis(); int startOff = writeSortedLong(buf, 0, now); /* timestamp */ int off = startOff; Iterator<LimiterMapKey> it = limiterMap.keySet().iterator(); while (it.hasNext()) { LimiterMapKey lmKey = it.next(); /* if entry is not for this store, skip it */ if (storeName.equals(lmKey.storeName) == false) { continue; } LimiterMapEntry lmEntry = limiterMap.get(lmKey); if (lmEntry == null) { continue; } /* if entry hasn't been used in more than 10 minutes, remove it */ if ((lmEntry.lastUpdate + 600_000) < now) { fine("Removing limiter entry: tableId=" + lmKey.tableId + " store=" + storeName); it.remove(); continue; } /* don't write records for tables with no usage in this period */ if (lmEntry.readUnitsUsed == 0 && lmEntry.writeUnitsUsed == 0) { continue; } fine("Creating element: tableId=" + lmKey.tableId + " RUs=" + lmEntry.readUnitsUsed + " WUs=" + lmEntry.writeUnitsUsed); off = writeSortedLong(buf, off, lmKey.tableId); off = writeSortedInt(buf, off, lmEntry.readUnitsUsed); off = writeSortedInt(buf, off, lmEntry.writeUnitsUsed); lmEntry.readUnitsUsed = lmEntry.writeUnitsUsed = 0; } if (off == startOff) { return null; } fine("writing entry at " + now); /* sigh. we have to make a copy. */ return Arrays.copyOf(buf, off); } public void shutDown() { if (limiterMap == null) { return; } limiterMap.clear(); delayPool.shutdownNow(); limiterMap = null; } private void fine(String msg, LogContext lc) { logger.fine("DRL: S" + (serviceID%10000) + " " + msg, lc); } private void fine(String msg) { fine(msg, null); } private void info(String msg, LogContext lc) { logger.info("DRL: S" + (serviceID%10000) + " " + msg, lc); } private void info(String msg) { info(msg, null); } /** * Return the topmost parent table for this table. * If this table isn't a child, this table is returned. * Note this is a recursive function. 
* TODO: find out if this is expensive: Does getParent() make any * external requests? */ private TableImpl getTopmostParentTable(TableImpl table) { TableImpl parent = (TableImpl) table.getParent(); if (parent == null || parent == table || parent.getId() == table.getId()) { return table; } return getTopmostParentTable(parent); } /** * Throw a read or write throttling error if this table is * over its (read/write) limit. */ public void throwIfOverLimit(String tableName, TableImpl table, String storeName, boolean doesReads, boolean doesWrites) { /* no need if we're not managing rate limiters */ if (limiterMap == null) { return; } if (table == null || tableName == null || storeName == null) { return; } table = getTopmostParentTable(table); TableLimits limits = table.getTableLimits(); if (limits == null || limits.hasThroughputLimits() == false) { return; } /* * Look up limiters in map. If not there, return false. */ LimiterMapKey lmKey = new LimiterMapKey(storeName, table.getId()); LimiterMapEntry lmEntry = limiterMap.get(lmKey); if (lmEntry == null) { return; } /* if a limiter can't consume 0 units, it is over its limit */ if (doesReads && (lmEntry.readLimiter.consumeExternally(0) > 0)) { throw new ReadThroughputException(tableName, 0, /* readRate, unused */ limits.getReadLimit(), "" /* msg, unused */); } if (doesWrites && (lmEntry.writeLimiter.consumeExternally(0) > 0)) { throw new WriteThroughputException(tableName, 0, /* writeRate, unused */ limits.getWriteLimit(), "" /* msg, unused */); } } /** * Increment used units for a table, possibly scheduling a task for * delaying response to client based on table limits. * Note: timeoutMs is used as-is: caller should adjust this based * on its known client-side latency. * @return if 0, limits not above any threshold; continue as normal. * if > 0, delayTask is scheduled to run after this many ms. 
*/ public int delayResponse(TableImpl table, String storeName, KVStoreImpl store, int readUnitsUsed, int writeUnitsUsed, int timeoutMs, LogContext lc, Runnable delayTask) { /* no need if we're not managing rate limiters */ if (limiterMap == null) { return 0; } if (table == null || storeName == null || store == null || (readUnitsUsed == 0 && writeUnitsUsed == 0)) { return 0; } table = getTopmostParentTable(table); TableLimits limits = table.getTableLimits(); if (limits == null || limits.hasThroughputLimits() == false) { return 0; } /* keep storeName -> store mapping for DRL backing stores */ if (config.useDistributed) { storeMap.put(storeName, store); } int delayMs = 0; long now = System.currentTimeMillis(); /* * Look up limiters in map. If they don't exist, create them. */ LimiterMapKey lmKey = new LimiterMapKey(storeName, table.getId()); LimiterMapEntry lmEntry = limiterMap.get(lmKey); if (lmEntry == null) { lmEntry = new LimiterMapEntry(limits.getReadLimit(), limits.getWriteLimit()); fine("Adding new entry for tableId=" + table.getId(), lc); limiterMap.put(lmKey, lmEntry); } double readLimit = (double)limits.getReadLimit() * config.rateFactor; if (readUnitsUsed != 0 && readLimit > 0.0) { /* update limiter if limits have changed */ if (lmEntry.readLimiter.getLimitPerSecond() != readLimit) { lmEntry.readLimiter.setLimitPerSecond(readLimit); } delayMs = lmEntry.readLimiter.consumeExternally(readUnitsUsed); /* no real need for synchronization here */ lmEntry.readUnitsUsed += readUnitsUsed; lmEntry.lastUpdate = now; } double writeLimit = (double)limits.getWriteLimit() * config.rateFactor; if (writeUnitsUsed != 0 && writeLimit > 0.0) { /* update limiter if limits have changed */ if (lmEntry.writeLimiter.getLimitPerSecond() != writeLimit) { lmEntry.writeLimiter.setLimitPerSecond(writeLimit); } int ms = lmEntry.writeLimiter.consumeExternally(writeUnitsUsed); if (ms > delayMs) { delayMs = ms; } /* no real need for synchronization here */ lmEntry.writeUnitsUsed += 
writeUnitsUsed; lmEntry.lastUpdate = now; } /* if going to delay, set stats overLimit flag */ if (delayMs > 0) { lmEntry.overLimit = true; } /* make sure delay time is below given timeout */ if (delayMs > (timeoutMs - 1)) { delayMs = timeoutMs - 1; } if (delayMs <= 0) { return 0; } /* if delay pool size is very large, just skip delaying */ if (delayPool.getQueue().size() > MAX_DELAY_QUEUE_SIZE) { fine("Could not delay response: too many " + "queued responses", lc); /* TODO: add this to stats */ return 0; } /* delay the response */ try { delayPool.schedule(delayTask, delayMs, TimeUnit.MILLISECONDS); } catch (RejectedExecutionException ree) { /* couldn't schedule: just do now */ fine("Could not delay response: " + ree, lc); return 0; } fine("delayed response for " + delayMs + "ms due to rate limiting", lc); curstats.delayedResponses++; curstats.delayedMillis += delayMs; return delayMs; } private class LimiterMapEntry { public SimpleRateLimiter readLimiter; public SimpleRateLimiter writeLimiter; public int readUnitsUsed; /* since last write to backing store */ public int writeUnitsUsed; /* since last write to backing store */ public long lastUpdate; public boolean overLimit; /* since last stats update (1M intervals) */ public LimiterMapEntry(int readLimit, int writeLimit) { /* allow for a small configured burst of errors before limiting */ this.readLimiter = new SimpleRateLimiter( (double)readLimit * config.rateFactor, (double)config.limitCreditMs / 1000.0); this.writeLimiter = new SimpleRateLimiter( (double)writeLimit * config.rateFactor, (double)config.limitCreditMs / 1000.0); this.readLimiter.setCurrentRate(0.0); this.writeLimiter.setCurrentRate(0.0); this.readUnitsUsed = 0; this.writeUnitsUsed = 0; this.lastUpdate = System.currentTimeMillis(); this.overLimit = false; } } private class LimiterMapKey implements Comparable<LimiterMapKey> { public String storeName; public long tableId; public LimiterMapKey(String storeName, long tableId) { this.storeName = storeName; 
this.tableId = tableId; } @Override public int compareTo(LimiterMapKey other) { if (this.tableId > other.tableId) { return 1; } if (this.tableId < other.tableId) { return -1; } return this.storeName.compareTo(other.storeName); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } LimiterMapKey other = (LimiterMapKey)obj; return (compareTo(other) == 0); } @Override public int hashCode() { return storeName.hashCode() + (int)tableId; } } }
/* ---- concatenation artifact: the text below is repository metadata
 *      (repo id, file size, file path) for the next file in this
 *      combined source, not Java code ----
 * apache/hadoop
 * 37,781
 * hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java
 */
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.nodelabels; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; 
import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEvent; import org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEventType; import org.apache.hadoop.yarn.nodelabels.event.RemoveClusterNodeLabels; import org.apache.hadoop.yarn.nodelabels.event.StoreNewClusterNodeLabels; import org.apache.hadoop.yarn.nodelabels.event.UpdateNodeToLabelsMappingsEvent; import org.apache.hadoop.yarn.util.resource.Resources; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; @Private public class CommonNodeLabelsManager extends AbstractService { protected static final Logger LOG = LoggerFactory.getLogger(CommonNodeLabelsManager.class); public static final Set<String> EMPTY_STRING_SET = Collections .unmodifiableSet(new HashSet<String>(0)); public static final Set<NodeLabel> EMPTY_NODELABEL_SET = Collections .unmodifiableSet(new HashSet<NodeLabel>(0)); public static final String ANY = "*"; public static final Set<String> ACCESS_ANY_LABEL_SET = ImmutableSet.of(ANY); public static final int WILDCARD_PORT = 0; // Flag to identify startup for removelabel private boolean initNodeLabelStoreInProgress = false; /** * Error messages */ @VisibleForTesting public static final String NODE_LABELS_NOT_ENABLED_ERR = "Node-label-based scheduling is disabled. 
Please check " + YarnConfiguration.NODE_LABELS_ENABLED; /** * If a user doesn't specify label of a queue or node, it belongs * DEFAULT_LABEL */ public static final String NO_LABEL = ""; protected Dispatcher dispatcher; protected ConcurrentMap<String, RMNodeLabel> labelCollections = new ConcurrentHashMap<String, RMNodeLabel>(); protected ConcurrentMap<String, Host> nodeCollections = new ConcurrentHashMap<String, Host>(); private ConcurrentMap<NodeId, Boolean> isNodeLabelFromHost = new ConcurrentHashMap<NodeId, Boolean>(); protected RMNodeLabel noNodeLabel; protected final ReadLock readLock; protected final WriteLock writeLock; protected NodeLabelsStore store; private boolean nodeLabelsEnabled = false; private boolean isCentralizedNodeLabelConfiguration = true; /** * A <code>Host</code> can have multiple <code>Node</code>s */ public static class Host { public Set<String> labels; public Map<NodeId, Node> nms; protected Host() { labels = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>()); nms = new ConcurrentHashMap<NodeId, Node>(); } public Host copy() { Host c = new Host(); c.labels = new HashSet<String>(labels); for (Entry<NodeId, Node> entry : nms.entrySet()) { c.nms.put(entry.getKey(), entry.getValue().copy()); } return c; } } protected static class Node { public Set<String> labels; public Resource resource; public boolean running; public NodeId nodeId; protected Node(NodeId nodeid) { labels = null; resource = Resource.newInstance(0, 0); running = false; nodeId = nodeid; } public Node copy() { Node c = new Node(nodeId); if (labels != null) { c.labels = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>()); c.labels.addAll(labels); } else { c.labels = null; } c.resource = Resources.clone(resource); c.running = running; return c; } } private enum NodeLabelUpdateOperation { ADD, REMOVE, REPLACE } private final class ForwardingEventHandler implements EventHandler<NodeLabelsStoreEvent> { @Override public void handle(NodeLabelsStoreEvent 
event) { handleStoreEvent(event); } } // Dispatcher related code protected void handleStoreEvent(NodeLabelsStoreEvent event) { try { switch (event.getType()) { case ADD_LABELS: StoreNewClusterNodeLabels storeNewClusterNodeLabelsEvent = (StoreNewClusterNodeLabels) event; store.storeNewClusterNodeLabels(storeNewClusterNodeLabelsEvent .getLabels()); break; case REMOVE_LABELS: RemoveClusterNodeLabels removeClusterNodeLabelsEvent = (RemoveClusterNodeLabels) event; store.removeClusterNodeLabels(removeClusterNodeLabelsEvent.getLabels()); break; case STORE_NODE_TO_LABELS: UpdateNodeToLabelsMappingsEvent updateNodeToLabelsMappingsEvent = (UpdateNodeToLabelsMappingsEvent) event; store.updateNodeToLabelsMappings(updateNodeToLabelsMappingsEvent .getNodeToLabels()); break; } } catch (IOException e) { LOG.error("Failed to store label modification to storage"); throw new YarnRuntimeException(e); } } public CommonNodeLabelsManager() { super(CommonNodeLabelsManager.class.getName()); ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); readLock = lock.readLock(); writeLock = lock.writeLock(); } // for UT purpose protected void initDispatcher(Configuration conf) { // create async handler dispatcher = new AsyncDispatcher("NodeLabelManager dispatcher"); AsyncDispatcher asyncDispatcher = (AsyncDispatcher) dispatcher; asyncDispatcher.init(conf); asyncDispatcher.setDrainEventsOnStop(); } @Override protected void serviceInit(Configuration conf) throws Exception { // set if node labels enabled nodeLabelsEnabled = YarnConfiguration.areNodeLabelsEnabled(conf); isCentralizedNodeLabelConfiguration = YarnConfiguration.isCentralizedNodeLabelConfiguration(conf); noNodeLabel = new RMNodeLabel(NO_LABEL); labelCollections.put(NO_LABEL, noNodeLabel); } /** * @return the isStartup */ protected boolean isInitNodeLabelStoreInProgress() { return initNodeLabelStoreInProgress; } /** * @return true if node label configuration type is not distributed. 
*/ public boolean isCentralizedConfiguration() { return isCentralizedNodeLabelConfiguration; } protected void initNodeLabelStore(Configuration conf) throws Exception { this.store = ReflectionUtils .newInstance( conf.getClass(YarnConfiguration.FS_NODE_LABELS_STORE_IMPL_CLASS, FileSystemNodeLabelsStore.class, NodeLabelsStore.class), conf); this.store.init(conf, this); this.store.recover(); } // for UT purpose protected void startDispatcher() { // start dispatcher AsyncDispatcher asyncDispatcher = (AsyncDispatcher) dispatcher; asyncDispatcher.start(); } @Override protected void serviceStart() throws Exception { if (nodeLabelsEnabled) { setInitNodeLabelStoreInProgress(true); initNodeLabelStore(getConfig()); setInitNodeLabelStoreInProgress(false); } // init dispatcher only when service start, because recover will happen in // service init, we don't want to trigger any event handling at that time. initDispatcher(getConfig()); if (null != dispatcher) { dispatcher.register(NodeLabelsStoreEventType.class, new ForwardingEventHandler()); } startDispatcher(); } // for UT purpose protected void stopDispatcher() { AsyncDispatcher asyncDispatcher = (AsyncDispatcher) dispatcher; if (null != asyncDispatcher) { asyncDispatcher.stop(); } } @Override protected void serviceStop() throws Exception { // finalize store stopDispatcher(); // only close store when we enabled store persistent if (null != store) { store.close(); } } @SuppressWarnings("unchecked") public void addToCluserNodeLabels(Collection<NodeLabel> labels) throws IOException { if (!nodeLabelsEnabled) { LOG.error(NODE_LABELS_NOT_ENABLED_ERR); throw new IOException(NODE_LABELS_NOT_ENABLED_ERR); } if (null == labels || labels.isEmpty()) { return; } List<NodeLabel> newLabels = new ArrayList<NodeLabel>(); normalizeNodeLabels(labels); // check any mismatch in exclusivity no mismatch with skip checkExclusivityMatch(labels); // do a check before actual adding them, will throw exception if any of them // doesn't meet label name 
requirement for (NodeLabel label : labels) { NodeLabelUtil.checkAndThrowLabelName(label.getName()); } for (NodeLabel label : labels) { // shouldn't overwrite it to avoid changing the Label.resource if (this.labelCollections.get(label.getName()) == null) { this.labelCollections.put(label.getName(), new RMNodeLabel(label)); newLabels.add(label); } } if (null != dispatcher && !newLabels.isEmpty()) { dispatcher.getEventHandler().handle( new StoreNewClusterNodeLabels(newLabels)); } LOG.info("Add labels: [" + StringUtils.join(labels.iterator(), ",") + "]"); } /** * Add multiple node labels to repository. * * @param labels * new node labels added * @throws IOException io error occur. */ @VisibleForTesting public void addToCluserNodeLabelsWithDefaultExclusivity(Set<String> labels) throws IOException { Set<NodeLabel> nodeLabels = new HashSet<NodeLabel>(); for (String label : labels) { nodeLabels.add(NodeLabel.newInstance(label)); } addToCluserNodeLabels(nodeLabels); } protected void checkAddLabelsToNode( Map<NodeId, Set<String>> addedLabelsToNode) throws IOException { if (null == addedLabelsToNode || addedLabelsToNode.isEmpty()) { return; } // check all labels being added existed Set<String> knownLabels = labelCollections.keySet(); for (Entry<NodeId, Set<String>> entry : addedLabelsToNode.entrySet()) { NodeId nodeId = entry.getKey(); Set<String> labels = entry.getValue(); if (!knownLabels.containsAll(labels)) { String msg = "Not all labels being added contained by known " + "label collections, please check" + ", added labels=[" + StringUtils.join(labels, ",") + "]"; LOG.error(msg); throw new IOException(msg); } // In YARN-2694, we temporarily disable user add more than 1 labels on a // same host if (!labels.isEmpty()) { Set<String> newLabels = new HashSet<String>(getLabelsByNode(nodeId)); newLabels.addAll(labels); // we don't allow number of labels on a node > 1 after added labels if (newLabels.size() > 1) { String msg = String.format( "%d labels specified on host=%s after 
add labels to node" + ", please note that we do not support specifying multiple" + " labels on a single host for now.", newLabels.size(), nodeId.getHost()); LOG.error(msg); throw new IOException(msg); } } } } /** * add more labels to nodes. * * @param addedLabelsToNode node {@literal ->} labels map * @throws IOException io error occur. */ public void addLabelsToNode(Map<NodeId, Set<String>> addedLabelsToNode) throws IOException { if (!nodeLabelsEnabled) { LOG.error(NODE_LABELS_NOT_ENABLED_ERR); throw new IOException(NODE_LABELS_NOT_ENABLED_ERR); } addedLabelsToNode = normalizeNodeIdToLabels(addedLabelsToNode); checkAddLabelsToNode(addedLabelsToNode); internalUpdateLabelsOnNodes(addedLabelsToNode, NodeLabelUpdateOperation.ADD); } protected void checkRemoveFromClusterNodeLabels( Collection<String> labelsToRemove) throws IOException { if (null == labelsToRemove || labelsToRemove.isEmpty()) { return; } // Check if label to remove doesn't existed or null/empty, will throw // exception if any of labels to remove doesn't meet requirement for (String label : labelsToRemove) { label = normalizeLabel(label); if (label == null || label.isEmpty()) { throw new IOException("Label to be removed is null or empty"); } if (!labelCollections.containsKey(label)) { throw new IOException("Node label=" + label + " to be removed doesn't existed in cluster " + "node labels collection."); } } } @SuppressWarnings("unchecked") protected void internalRemoveFromClusterNodeLabels(Collection<String> labelsToRemove) { // remove labels from nodes for (Map.Entry<String,Host> nodeEntry : nodeCollections.entrySet()) { Host host = nodeEntry.getValue(); if (null != host) { host.labels.removeAll(labelsToRemove); for (Node nm : host.nms.values()) { if (nm.labels != null) { nm.labels.removeAll(labelsToRemove); } } } } // remove labels from node labels collection for (String label : labelsToRemove) { labelCollections.remove(label); } // create event to remove labels if (null != dispatcher) { 
dispatcher.getEventHandler().handle( new RemoveClusterNodeLabels(labelsToRemove)); } LOG.info("Remove labels: [" + StringUtils.join(labelsToRemove.iterator(), ",") + "]"); } /** * Remove multiple node labels from repository * * @param labelsToRemove * node labels to remove * @throws IOException io error occur. */ public void removeFromClusterNodeLabels(Collection<String> labelsToRemove) throws IOException { if (!nodeLabelsEnabled) { LOG.error(NODE_LABELS_NOT_ENABLED_ERR); throw new IOException(NODE_LABELS_NOT_ENABLED_ERR); } labelsToRemove = normalizeLabels(labelsToRemove); checkRemoveFromClusterNodeLabels(labelsToRemove); internalRemoveFromClusterNodeLabels(labelsToRemove); } protected void checkRemoveLabelsFromNode( Map<NodeId, Set<String>> removeLabelsFromNode) throws IOException { // check all labels being added existed Set<String> knownLabels = labelCollections.keySet(); for (Entry<NodeId, Set<String>> entry : removeLabelsFromNode.entrySet()) { NodeId nodeId = entry.getKey(); Set<String> labels = entry.getValue(); if (!knownLabels.containsAll(labels)) { String msg = "Not all labels being removed contained by known " + "label collections, please check" + ", removed labels=[" + StringUtils.join(labels, ",") + "]"; LOG.error(msg); throw new IOException(msg); } Set<String> originalLabels = null; boolean nodeExisted = false; if (WILDCARD_PORT != nodeId.getPort()) { Node nm = getNMInNodeSet(nodeId); if (nm != null) { originalLabels = nm.labels; nodeExisted = true; } } else { Host host = nodeCollections.get(nodeId.getHost()); if (null != host) { originalLabels = host.labels; nodeExisted = true; } } if (!nodeExisted) { String msg = "Try to remove labels from NM=" + nodeId + ", but the NM doesn't existed"; LOG.error(msg); throw new IOException(msg); } // the labels will never be null if (labels.isEmpty()) { continue; } // originalLabels may be null, // because when a Node is created, Node.labels can be null. 
if (originalLabels == null || !originalLabels.containsAll(labels)) { String msg = "Try to remove labels = [" + StringUtils.join(labels, ",") + "], but not all labels contained by NM=" + nodeId; LOG.error(msg); throw new IOException(msg); } } } private void addNodeToLabels(NodeId node, Set<String> labels) { for(String l : labels) { labelCollections.get(l).addNodeId(node); } } protected void removeNodeFromLabels(NodeId node, Set<String> labels) { for(String l : labels) { labelCollections.get(l).removeNodeId(node); } } private void replaceNodeForLabels(NodeId node, Set<String> oldLabels, Set<String> newLabels) { if(oldLabels != null) { removeNodeFromLabels(node, oldLabels); } addNodeToLabels(node, newLabels); } private void addLabelsToNodeInHost(NodeId node, Set<String> labels) throws IOException { Host host = nodeCollections.get(node.getHost()); if (null == host) { throw new IOException("Cannot add labels to a host that " + "does not exist. Create the host before adding labels to it."); } Node nm = host.nms.get(node); if (nm != null) { Node newNm = nm.copy(); if (newNm.labels == null) { newNm.labels = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>()); } newNm.labels.addAll(labels); host.nms.put(node, newNm); } } protected void removeLabelsFromNodeInHost(NodeId node, Set<String> labels) throws IOException { Host host = nodeCollections.get(node.getHost()); if (null == host) { throw new IOException("Cannot remove labels from a host that " + "does not exist. 
Create the host before adding labels to it."); } Node nm = host.nms.get(node); if (nm != null) { if (nm.labels == null) { nm.labels = new HashSet<String>(); } else { nm.labels.removeAll(labels); } } } private void replaceLabelsForNode(NodeId node, Set<String> oldLabels, Set<String> newLabels) throws IOException { if(oldLabels != null) { removeLabelsFromNodeInHost(node, oldLabels); } addLabelsToNodeInHost(node, newLabels); } protected boolean isNodeLabelExplicit(NodeId nodeId) { return !isNodeLabelFromHost.containsKey(nodeId) || isNodeLabelFromHost.get(nodeId); } @SuppressWarnings("unchecked") protected void internalUpdateLabelsOnNodes( Map<NodeId, Set<String>> nodeToLabels, NodeLabelUpdateOperation op) throws IOException { // do update labels from nodes Map<NodeId, Set<String>> newNMToLabels = new HashMap<NodeId, Set<String>>(); Set<String> oldLabels; for (Entry<NodeId, Set<String>> entry : nodeToLabels.entrySet()) { NodeId nodeId = entry.getKey(); Set<String> labels = entry.getValue(); createHostIfNonExisted(nodeId.getHost()); if (nodeId.getPort() == WILDCARD_PORT) { Host host = nodeCollections.get(nodeId.getHost()); switch (op) { case REMOVE: removeNodeFromLabels(nodeId, labels); host.labels.removeAll(labels); for (Node node : host.nms.values()) { if (node.labels != null) { node.labels.removeAll(labels); } removeNodeFromLabels(node.nodeId, labels); } break; case ADD: addNodeToLabels(nodeId, labels); host.labels.addAll(labels); for (Node node : host.nms.values()) { if (node.labels != null) { node.labels.addAll(labels); } addNodeToLabels(node.nodeId, labels); isNodeLabelFromHost.put(node.nodeId, true); } break; case REPLACE: replaceNodeForLabels(nodeId, host.labels, labels); replaceLabelsForNode(nodeId, host.labels, labels); host.labels.clear(); host.labels.addAll(labels); for (Node node : host.nms.values()) { replaceNodeForLabels(node.nodeId, node.labels, labels); replaceLabelsForNode(node.nodeId, node.labels, labels); node.labels = null; 
isNodeLabelFromHost.put(node.nodeId, true); } break; default: break; } newNMToLabels.put(nodeId, host.labels); } else { if (EnumSet.of(NodeLabelUpdateOperation.ADD, NodeLabelUpdateOperation.REPLACE).contains(op)) { // Add and replace createNodeIfNonExisted(nodeId); Node nm = getNMInNodeSet(nodeId); switch (op) { case ADD: addNodeToLabels(nodeId, labels); if (nm.labels == null) { nm.labels = new HashSet<String>(); } nm.labels.addAll(labels); isNodeLabelFromHost.put(nm.nodeId, false); break; case REPLACE: oldLabels = getLabelsByNode(nodeId); replaceNodeForLabels(nodeId, oldLabels, labels); replaceLabelsForNode(nodeId, oldLabels, labels); if (nm.labels == null) { nm.labels = new HashSet<String>(); } nm.labels.clear(); nm.labels.addAll(labels); isNodeLabelFromHost.put(nm.nodeId, false); break; default: break; } newNMToLabels.put(nodeId, nm.labels); } else { // remove removeNodeFromLabels(nodeId, labels); Node nm = getNMInNodeSet(nodeId); if (nm.labels != null) { nm.labels.removeAll(labels); newNMToLabels.put(nodeId, nm.labels); } } } } if (null != dispatcher && isCentralizedNodeLabelConfiguration) { // In case of DistributedNodeLabelConfiguration or // DelegatedCentralizedNodeLabelConfiguration, no need to save the // NodeLabels Mapping to the back-end store, as on RM restart/failover // NodeLabels are collected from NM through Register/Heartbeat again // in case of DistributedNodeLabelConfiguration and collected from // RMNodeLabelsMappingProvider in case of // DelegatedCentralizedNodeLabelConfiguration dispatcher.getEventHandler().handle( new UpdateNodeToLabelsMappingsEvent(newNMToLabels)); } // shows node->labels we added LOG.info(op.name() + " labels on nodes:"); for (Entry<NodeId, Set<String>> entry : newNMToLabels.entrySet()) { LOG.info(" NM=" + entry.getKey() + ", labels=[" + StringUtils.join(entry.getValue().iterator(), ",") + "]"); } } /** * remove labels from nodes, labels being removed most be contained by these * nodes. 
* * @param removeLabelsFromNode node {@literal ->} labels map * @throws IOException io error occur. */ public void removeLabelsFromNode(Map<NodeId, Set<String>> removeLabelsFromNode) throws IOException { if (!nodeLabelsEnabled) { LOG.error(NODE_LABELS_NOT_ENABLED_ERR); throw new IOException(NODE_LABELS_NOT_ENABLED_ERR); } removeLabelsFromNode = normalizeNodeIdToLabels(removeLabelsFromNode); checkRemoveLabelsFromNode(removeLabelsFromNode); internalUpdateLabelsOnNodes(removeLabelsFromNode, NodeLabelUpdateOperation.REMOVE); } protected void checkReplaceLabelsOnNode( Map<NodeId, Set<String>> replaceLabelsToNode) throws IOException { if (null == replaceLabelsToNode || replaceLabelsToNode.isEmpty()) { return; } // check all labels being added existed Set<String> knownLabels = labelCollections.keySet(); for (Entry<NodeId, Set<String>> entry : replaceLabelsToNode.entrySet()) { NodeId nodeId = entry.getKey(); Set<String> labels = entry.getValue(); // As in YARN-2694, we disable user add more than 1 labels on a same host if (labels.size() > 1) { String msg = String.format("%d labels specified on host=%s" + ", please note that we do not support specifying multiple" + " labels on a single host for now.", labels.size(), nodeId.getHost()); LOG.error(msg); throw new IOException(msg); } if (!knownLabels.containsAll(labels)) { String msg = "Not all labels being replaced contained by known " + "label collections, please check" + ", new labels=[" + StringUtils.join(labels, ",") + "]"; LOG.error(msg); throw new IOException(msg); } } } /** * replace labels to nodes * * @param replaceLabelsToNode node {@literal ->} labels map * @throws IOException io error occur. 
*/ public void replaceLabelsOnNode(Map<NodeId, Set<String>> replaceLabelsToNode) throws IOException { if (!nodeLabelsEnabled) { LOG.error(NODE_LABELS_NOT_ENABLED_ERR); throw new IOException(NODE_LABELS_NOT_ENABLED_ERR); } replaceLabelsToNode = normalizeNodeIdToLabels(replaceLabelsToNode); checkReplaceLabelsOnNode(replaceLabelsToNode); internalUpdateLabelsOnNodes(replaceLabelsToNode, NodeLabelUpdateOperation.REPLACE); } /** * Get mapping of nodes to labels * * @return nodes to labels map */ public Map<NodeId, Set<String>> getNodeLabels() { Map<NodeId, Set<String>> nodeToLabels = generateNodeLabelsInfoPerNode(String.class); return nodeToLabels; } /** * Get mapping of nodes to label info * * @return nodes to labels map */ public Map<NodeId, Set<NodeLabel>> getNodeLabelsInfo() { Map<NodeId, Set<NodeLabel>> nodeToLabels = generateNodeLabelsInfoPerNode(NodeLabel.class); return nodeToLabels; } @SuppressWarnings("unchecked") private <T> Map<NodeId, Set<T>> generateNodeLabelsInfoPerNode(Class<T> type) { readLock.lock(); try { Map<NodeId, Set<T>> nodeToLabels = new HashMap<>(); for (Entry<String, Host> entry : nodeCollections.entrySet()) { String hostName = entry.getKey(); Host host = entry.getValue(); for (NodeId nodeId : host.nms.keySet()) { if (type.isAssignableFrom(String.class)) { Set<String> nodeLabels = getLabelsByNode(nodeId); if (nodeLabels == null || nodeLabels.isEmpty()) { continue; } nodeToLabels.put(nodeId, (Set<T>) nodeLabels); } else { Set<NodeLabel> nodeLabels = getLabelsInfoByNode(nodeId); if (nodeLabels == null || nodeLabels.isEmpty()) { continue; } nodeToLabels.put(nodeId, (Set<T>) nodeLabels); } } if (!host.labels.isEmpty()) { if (type.isAssignableFrom(String.class)) { nodeToLabels.put(NodeId.newInstance(hostName, WILDCARD_PORT), (Set<T>) host.labels); } else { nodeToLabels.put(NodeId.newInstance(hostName, WILDCARD_PORT), (Set<T>) createNodeLabelFromLabelNames(host.labels)); } } } return Collections.unmodifiableMap(nodeToLabels); } finally { 
readLock.unlock(); } } /** * Get nodes that have no labels. * * @return set of nodes with no labels */ public Set<NodeId> getNodesWithoutALabel() { readLock.lock(); try { Set<NodeId> nodes = new HashSet<>(); for (Host host : nodeCollections.values()) { for (NodeId nodeId : host.nms.keySet()) { if (getLabelsByNode(nodeId).isEmpty()) { nodes.add(nodeId); } } } return Collections.unmodifiableSet(nodes); } finally { readLock.unlock(); } } /** * Get mapping of labels to nodes for all the labels. * * @return labels to nodes map */ public Map<String, Set<NodeId>> getLabelsToNodes() { readLock.lock(); try { return getLabelsToNodes(labelCollections.keySet()); } finally { readLock.unlock(); } } /** * Get mapping of labels to nodes for specified set of labels. * * @param labels set of labels for which labels to nodes mapping will be * returned. * @return labels to nodes map */ public Map<String, Set<NodeId>> getLabelsToNodes(Set<String> labels) { readLock.lock(); try { Map<String, Set<NodeId>> labelsToNodes = getLabelsToNodesMapping(labels, String.class); return Collections.unmodifiableMap(labelsToNodes); } finally { readLock.unlock(); } } /** * Get mapping of labels to nodes for all the labels. * * @return labels to nodes map */ public Map<NodeLabel, Set<NodeId>> getLabelsInfoToNodes() { readLock.lock(); try { return getLabelsInfoToNodes(labelCollections.keySet()); } finally { readLock.unlock(); } } /** * Get mapping of labels info to nodes for specified set of labels. * * @param labels * set of nodelabels for which labels to nodes mapping will be * returned. 
* @return labels to nodes map */ public Map<NodeLabel, Set<NodeId>> getLabelsInfoToNodes(Set<String> labels) { readLock.lock(); try { Map<NodeLabel, Set<NodeId>> labelsToNodes = getLabelsToNodesMapping( labels, NodeLabel.class); return Collections.unmodifiableMap(labelsToNodes); } finally { readLock.unlock(); } } private <T> Map<T, Set<NodeId>> getLabelsToNodesMapping(Set<String> labels, Class<T> type) { Map<T, Set<NodeId>> labelsToNodes = new HashMap<T, Set<NodeId>>(); for (String label : labels) { if (label.equals(NO_LABEL)) { continue; } RMNodeLabel nodeLabelInfo = labelCollections.get(label); if (nodeLabelInfo != null) { Set<NodeId> nodeIds = nodeLabelInfo.getAssociatedNodeIds(); if (!nodeIds.isEmpty()) { if (type.isAssignableFrom(String.class)) { labelsToNodes.put(type.cast(label), nodeIds); } else { labelsToNodes.put(type.cast(nodeLabelInfo.getNodeLabel()), nodeIds); } } } else { LOG.warn("getLabelsToNodes : Label [" + label + "] cannot be found"); } } return labelsToNodes; } /** * Get existing valid labels in repository * * @return existing valid labels in repository */ public Set<String> getClusterNodeLabelNames() { readLock.lock(); try { Set<String> labels = new HashSet<String>(labelCollections.keySet()); labels.remove(NO_LABEL); return Collections.unmodifiableSet(labels); } finally { readLock.unlock(); } } public List<NodeLabel> getClusterNodeLabels() { readLock.lock(); try { List<NodeLabel> nodeLabels = new ArrayList<>(); for (RMNodeLabel label : labelCollections.values()) { if (!label.getLabelName().equals(NO_LABEL)) { nodeLabels.add(NodeLabel.newInstance(label.getLabelName(), label.getIsExclusive())); } } return nodeLabels; } finally { readLock.unlock(); } } public boolean isExclusiveNodeLabel(String nodeLabel) throws IOException { if (nodeLabel.equals(NO_LABEL)) { return noNodeLabel.getIsExclusive(); } readLock.lock(); try { RMNodeLabel label = labelCollections.get(nodeLabel); if (label == null) { String message = "Getting is-exclusive-node-label, 
node-label = " + nodeLabel + ", is not existed."; LOG.error(message); throw new IOException(message); } return label.getIsExclusive(); } finally { readLock.unlock(); } } private void checkExclusivityMatch(Collection<NodeLabel> labels) throws IOException { ArrayList<NodeLabel> mismatchlabels = new ArrayList<NodeLabel>(); for (NodeLabel label : labels) { RMNodeLabel rmNodeLabel = this.labelCollections.get(label.getName()); if (rmNodeLabel != null && rmNodeLabel.getIsExclusive() != label.isExclusive()) { mismatchlabels.add(label); } } if (mismatchlabels.size() > 0) { throw new IOException( "Exclusivity cannot be modified for an existing label with : " + StringUtils.join(mismatchlabels.iterator(), ",")); } } protected String normalizeLabel(String label) { if (label != null) { return label.trim(); } return NO_LABEL; } private Set<String> normalizeLabels(Collection<String> labels) { Set<String> newLabels = new HashSet<String>(); for (String label : labels) { newLabels.add(normalizeLabel(label)); } return newLabels; } private void normalizeNodeLabels(Collection<NodeLabel> labels) { for (NodeLabel label : labels) { label.setName(normalizeLabel(label.getName())); } } protected Node getNMInNodeSet(NodeId nodeId) { return getNMInNodeSet(nodeId, nodeCollections); } protected Node getNMInNodeSet(NodeId nodeId, Map<String, Host> map) { return getNMInNodeSet(nodeId, map, false); } protected Node getNMInNodeSet(NodeId nodeId, Map<String, Host> map, boolean checkRunning) { Host host = map.get(nodeId.getHost()); if (null == host) { return null; } Node nm = host.nms.get(nodeId); if (null == nm) { return null; } if (checkRunning) { return nm.running ? 
nm : null; } return nm; } protected Set<String> getLabelsByNode(NodeId nodeId) { return getLabelsByNode(nodeId, nodeCollections); } protected Set<String> getLabelsByNode(NodeId nodeId, Map<String, Host> map) { Host host = map.get(nodeId.getHost()); if (null == host) { return EMPTY_STRING_SET; } Node nm = host.nms.get(nodeId); if (null != nm && null != nm.labels) { return nm.labels; } else { return host.labels; } } public Set<NodeLabel> getLabelsInfoByNode(NodeId nodeId) { readLock.lock(); try { Set<String> labels = getLabelsByNode(nodeId, nodeCollections); if (labels.isEmpty()) { return EMPTY_NODELABEL_SET; } Set<NodeLabel> nodeLabels = createNodeLabelFromLabelNames(labels); return nodeLabels; } finally { readLock.unlock(); } } private Set<NodeLabel> createNodeLabelFromLabelNames(Set<String> labels) { Set<NodeLabel> nodeLabels = new HashSet<NodeLabel>(); for (String label : labels) { if (label.equals(NO_LABEL)) { continue; } RMNodeLabel rmLabel = labelCollections.get(label); if (rmLabel == null) { continue; } nodeLabels.add(rmLabel.getNodeLabel()); } return nodeLabels; } protected void createNodeIfNonExisted(NodeId nodeId) throws IOException { Host host = nodeCollections.get(nodeId.getHost()); if (null == host) { throw new IOException("Should create host before creating node."); } Node nm = host.nms.get(nodeId); if (null == nm) { host.nms.put(nodeId, new Node(nodeId)); } } protected void createHostIfNonExisted(String hostName) { Host host = nodeCollections.get(hostName); if (null == host) { host = new Host(); nodeCollections.put(hostName, host); } } protected Map<NodeId, Set<String>> normalizeNodeIdToLabels( Map<NodeId, Set<String>> nodeIdToLabels) { Map<NodeId, Set<String>> newMap = new TreeMap<NodeId, Set<String>>(); for (Entry<NodeId, Set<String>> entry : nodeIdToLabels.entrySet()) { NodeId id = entry.getKey(); Set<String> labels = entry.getValue(); newMap.put(id, normalizeLabels(labels)); } return newMap; } public void setInitNodeLabelStoreInProgress( boolean 
initNodeLabelStoreInProgress) { this.initNodeLabelStoreInProgress = initNodeLabelStoreInProgress; } }
googleapis/google-cloud-java
37,818
java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/ListAttackPathsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1; /** * * * <pre> * Request message for listing the attack paths for a given simulation or valued * resource. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.ListAttackPathsRequest} */ public final class ListAttackPathsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.ListAttackPathsRequest) ListAttackPathsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListAttackPathsRequest.newBuilder() to construct. 
private ListAttackPathsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListAttackPathsRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListAttackPathsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListAttackPathsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListAttackPathsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.ListAttackPathsRequest.class, com.google.cloud.securitycenter.v1.ListAttackPathsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * The filter expression that filters the attack path in the response. 
* Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 4; private int pageSize_ = 0; /** * * * <pre> * The maximum number of results to return in a single response. Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (pageSize_ != 0) { output.writeInt32(4, pageSize_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, pageSize_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1.ListAttackPathsRequest)) { return super.equals(obj); } com.google.cloud.securitycenter.v1.ListAttackPathsRequest other = (com.google.cloud.securitycenter.v1.ListAttackPathsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( java.nio.ByteBuffer 
data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.v1.ListAttackPathsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for listing the attack paths for a given simulation or valued * resource. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.ListAttackPathsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.ListAttackPathsRequest) com.google.cloud.securitycenter.v1.ListAttackPathsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListAttackPathsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListAttackPathsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.ListAttackPathsRequest.class, com.google.cloud.securitycenter.v1.ListAttackPathsRequest.Builder.class); } // Construct using com.google.cloud.securitycenter.v1.ListAttackPathsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageToken_ = ""; pageSize_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListAttackPathsRequest_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v1.ListAttackPathsRequest getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1.ListAttackPathsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1.ListAttackPathsRequest build() { 
com.google.cloud.securitycenter.v1.ListAttackPathsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v1.ListAttackPathsRequest buildPartial() { com.google.cloud.securitycenter.v1.ListAttackPathsRequest result = new com.google.cloud.securitycenter.v1.ListAttackPathsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securitycenter.v1.ListAttackPathsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageSize_ = pageSize_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1.ListAttackPathsRequest) { return 
mergeFrom((com.google.cloud.securitycenter.v1.ListAttackPathsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v1.ListAttackPathsRequest other) { if (other == com.google.cloud.securitycenter.v1.ListAttackPathsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; 
/** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Name of parent to list attack paths. 
* * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of results to return in a single response. 
Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of results to return in a single response. Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The maximum number of results to return in a single response. Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000008); pageSize_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.ListAttackPathsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.ListAttackPathsRequest) private static final com.google.cloud.securitycenter.v1.ListAttackPathsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.ListAttackPathsRequest(); } public static com.google.cloud.securitycenter.v1.ListAttackPathsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListAttackPathsRequest> PARSER = new com.google.protobuf.AbstractParser<ListAttackPathsRequest>() { @java.lang.Override public ListAttackPathsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListAttackPathsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListAttackPathsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1.ListAttackPathsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): the three lines below are dataset-concatenation residue (repo id, byte size,
// and path of a second generated file), not Java source. This file splices together two
// unrelated generated protobuf classes (securitycenter ListAttackPathsRequest and vertexai
// DirectPredictResponse); both are protoc output marked "DO NOT EDIT" — regenerate from the
// .proto definitions rather than hand-editing. Original residue preserved verbatim:
// googleapis/google-cloud-java
// 37,818
// java-vertexai/proto-google-cloud-vertexai-v1/src/main/java/com/google/cloud/vertexai/api/DirectPredictResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vertexai/v1/prediction_service.proto // Protobuf Java Version: 3.25.3 package com.google.cloud.vertexai.api; /** * * * <pre> * Response message for * [PredictionService.DirectPredict][google.cloud.aiplatform.v1.PredictionService.DirectPredict]. * </pre> * * Protobuf type {@code google.cloud.vertexai.v1.DirectPredictResponse} */ public final class DirectPredictResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vertexai.v1.DirectPredictResponse) DirectPredictResponseOrBuilder { private static final long serialVersionUID = 0L; // Use DirectPredictResponse.newBuilder() to construct. 
private DirectPredictResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DirectPredictResponse() { outputs_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DirectPredictResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vertexai.api.PredictionServiceProto .internal_static_google_cloud_vertexai_v1_DirectPredictResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vertexai.api.PredictionServiceProto .internal_static_google_cloud_vertexai_v1_DirectPredictResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vertexai.api.DirectPredictResponse.class, com.google.cloud.vertexai.api.DirectPredictResponse.Builder.class); } private int bitField0_; public static final int OUTPUTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.vertexai.api.Tensor> outputs_; /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.vertexai.api.Tensor> getOutputsList() { return outputs_; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.vertexai.api.TensorOrBuilder> getOutputsOrBuilderList() { return outputs_; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ @java.lang.Override public int getOutputsCount() { return outputs_.size(); } /** * * * <pre> * The prediction output. 
* </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ @java.lang.Override public com.google.cloud.vertexai.api.Tensor getOutputs(int index) { return outputs_.get(index); } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ @java.lang.Override public com.google.cloud.vertexai.api.TensorOrBuilder getOutputsOrBuilder(int index) { return outputs_.get(index); } public static final int PARAMETERS_FIELD_NUMBER = 2; private com.google.cloud.vertexai.api.Tensor parameters_; /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> * * @return Whether the parameters field is set. */ @java.lang.Override public boolean hasParameters() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> * * @return The parameters. */ @java.lang.Override public com.google.cloud.vertexai.api.Tensor getParameters() { return parameters_ == null ? com.google.cloud.vertexai.api.Tensor.getDefaultInstance() : parameters_; } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ @java.lang.Override public com.google.cloud.vertexai.api.TensorOrBuilder getParametersOrBuilder() { return parameters_ == null ? 
com.google.cloud.vertexai.api.Tensor.getDefaultInstance() : parameters_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < outputs_.size(); i++) { output.writeMessage(1, outputs_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getParameters()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < outputs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, outputs_.get(i)); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getParameters()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vertexai.api.DirectPredictResponse)) { return super.equals(obj); } com.google.cloud.vertexai.api.DirectPredictResponse other = (com.google.cloud.vertexai.api.DirectPredictResponse) obj; if (!getOutputsList().equals(other.getOutputsList())) return false; if (hasParameters() != other.hasParameters()) return false; if (hasParameters()) { if (!getParameters().equals(other.getParameters())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getOutputsCount() > 0) { hash = (37 * hash) + OUTPUTS_FIELD_NUMBER; hash = 
(53 * hash) + getOutputsList().hashCode(); } if (hasParameters()) { hash = (37 * hash) + PARAMETERS_FIELD_NUMBER; hash = (53 * hash) + getParameters().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vertexai.api.DirectPredictResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.vertexai.api.DirectPredictResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [PredictionService.DirectPredict][google.cloud.aiplatform.v1.PredictionService.DirectPredict]. * </pre> * * Protobuf type {@code google.cloud.vertexai.v1.DirectPredictResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vertexai.v1.DirectPredictResponse) com.google.cloud.vertexai.api.DirectPredictResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vertexai.api.PredictionServiceProto .internal_static_google_cloud_vertexai_v1_DirectPredictResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vertexai.api.PredictionServiceProto .internal_static_google_cloud_vertexai_v1_DirectPredictResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vertexai.api.DirectPredictResponse.class, com.google.cloud.vertexai.api.DirectPredictResponse.Builder.class); } // Construct using com.google.cloud.vertexai.api.DirectPredictResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getOutputsFieldBuilder(); getParametersFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (outputsBuilder_ == null) { outputs_ = java.util.Collections.emptyList(); } else { outputs_ = null; 
outputsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); parameters_ = null; if (parametersBuilder_ != null) { parametersBuilder_.dispose(); parametersBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vertexai.api.PredictionServiceProto .internal_static_google_cloud_vertexai_v1_DirectPredictResponse_descriptor; } @java.lang.Override public com.google.cloud.vertexai.api.DirectPredictResponse getDefaultInstanceForType() { return com.google.cloud.vertexai.api.DirectPredictResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vertexai.api.DirectPredictResponse build() { com.google.cloud.vertexai.api.DirectPredictResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vertexai.api.DirectPredictResponse buildPartial() { com.google.cloud.vertexai.api.DirectPredictResponse result = new com.google.cloud.vertexai.api.DirectPredictResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.vertexai.api.DirectPredictResponse result) { if (outputsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { outputs_ = java.util.Collections.unmodifiableList(outputs_); bitField0_ = (bitField0_ & ~0x00000001); } result.outputs_ = outputs_; } else { result.outputs_ = outputsBuilder_.build(); } } private void buildPartial0(com.google.cloud.vertexai.api.DirectPredictResponse result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.parameters_ = parametersBuilder_ == null ? 
parameters_ : parametersBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vertexai.api.DirectPredictResponse) { return mergeFrom((com.google.cloud.vertexai.api.DirectPredictResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vertexai.api.DirectPredictResponse other) { if (other == com.google.cloud.vertexai.api.DirectPredictResponse.getDefaultInstance()) return this; if (outputsBuilder_ == null) { if (!other.outputs_.isEmpty()) { if (outputs_.isEmpty()) { outputs_ = other.outputs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureOutputsIsMutable(); outputs_.addAll(other.outputs_); } onChanged(); } } else { if (!other.outputs_.isEmpty()) { if (outputsBuilder_.isEmpty()) { outputsBuilder_.dispose(); outputsBuilder_ = null; outputs_ = other.outputs_; bitField0_ = (bitField0_ & ~0x00000001); outputsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOutputsFieldBuilder() : null; } else { outputsBuilder_.addAllMessages(other.outputs_); } } } if (other.hasParameters()) { mergeParameters(other.getParameters()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.vertexai.api.Tensor m = input.readMessage( com.google.cloud.vertexai.api.Tensor.parser(), extensionRegistry); if (outputsBuilder_ == null) { ensureOutputsIsMutable(); outputs_.add(m); } else { outputsBuilder_.addMessage(m); } break; } // case 10 case 18: { input.readMessage(getParametersFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.vertexai.api.Tensor> outputs_ = java.util.Collections.emptyList(); private void ensureOutputsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { outputs_ = new java.util.ArrayList<com.google.cloud.vertexai.api.Tensor>(outputs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vertexai.api.Tensor, com.google.cloud.vertexai.api.Tensor.Builder, com.google.cloud.vertexai.api.TensorOrBuilder> outputsBuilder_; /** * * * <pre> * The prediction output. 
* </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public java.util.List<com.google.cloud.vertexai.api.Tensor> getOutputsList() { if (outputsBuilder_ == null) { return java.util.Collections.unmodifiableList(outputs_); } else { return outputsBuilder_.getMessageList(); } } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public int getOutputsCount() { if (outputsBuilder_ == null) { return outputs_.size(); } else { return outputsBuilder_.getCount(); } } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public com.google.cloud.vertexai.api.Tensor getOutputs(int index) { if (outputsBuilder_ == null) { return outputs_.get(index); } else { return outputsBuilder_.getMessage(index); } } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder setOutputs(int index, com.google.cloud.vertexai.api.Tensor value) { if (outputsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOutputsIsMutable(); outputs_.set(index, value); onChanged(); } else { outputsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder setOutputs( int index, com.google.cloud.vertexai.api.Tensor.Builder builderForValue) { if (outputsBuilder_ == null) { ensureOutputsIsMutable(); outputs_.set(index, builderForValue.build()); onChanged(); } else { outputsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The prediction output. 
* </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder addOutputs(com.google.cloud.vertexai.api.Tensor value) { if (outputsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOutputsIsMutable(); outputs_.add(value); onChanged(); } else { outputsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder addOutputs(int index, com.google.cloud.vertexai.api.Tensor value) { if (outputsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOutputsIsMutable(); outputs_.add(index, value); onChanged(); } else { outputsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder addOutputs(com.google.cloud.vertexai.api.Tensor.Builder builderForValue) { if (outputsBuilder_ == null) { ensureOutputsIsMutable(); outputs_.add(builderForValue.build()); onChanged(); } else { outputsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder addOutputs( int index, com.google.cloud.vertexai.api.Tensor.Builder builderForValue) { if (outputsBuilder_ == null) { ensureOutputsIsMutable(); outputs_.add(index, builderForValue.build()); onChanged(); } else { outputsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder addAllOutputs( java.lang.Iterable<? 
extends com.google.cloud.vertexai.api.Tensor> values) { if (outputsBuilder_ == null) { ensureOutputsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, outputs_); onChanged(); } else { outputsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder clearOutputs() { if (outputsBuilder_ == null) { outputs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { outputsBuilder_.clear(); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public Builder removeOutputs(int index) { if (outputsBuilder_ == null) { ensureOutputsIsMutable(); outputs_.remove(index); onChanged(); } else { outputsBuilder_.remove(index); } return this; } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public com.google.cloud.vertexai.api.Tensor.Builder getOutputsBuilder(int index) { return getOutputsFieldBuilder().getBuilder(index); } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public com.google.cloud.vertexai.api.TensorOrBuilder getOutputsOrBuilder(int index) { if (outputsBuilder_ == null) { return outputs_.get(index); } else { return outputsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public java.util.List<? extends com.google.cloud.vertexai.api.TensorOrBuilder> getOutputsOrBuilderList() { if (outputsBuilder_ != null) { return outputsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(outputs_); } } /** * * * <pre> * The prediction output. 
* </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public com.google.cloud.vertexai.api.Tensor.Builder addOutputsBuilder() { return getOutputsFieldBuilder() .addBuilder(com.google.cloud.vertexai.api.Tensor.getDefaultInstance()); } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public com.google.cloud.vertexai.api.Tensor.Builder addOutputsBuilder(int index) { return getOutputsFieldBuilder() .addBuilder(index, com.google.cloud.vertexai.api.Tensor.getDefaultInstance()); } /** * * * <pre> * The prediction output. * </pre> * * <code>repeated .google.cloud.vertexai.v1.Tensor outputs = 1;</code> */ public java.util.List<com.google.cloud.vertexai.api.Tensor.Builder> getOutputsBuilderList() { return getOutputsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vertexai.api.Tensor, com.google.cloud.vertexai.api.Tensor.Builder, com.google.cloud.vertexai.api.TensorOrBuilder> getOutputsFieldBuilder() { if (outputsBuilder_ == null) { outputsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vertexai.api.Tensor, com.google.cloud.vertexai.api.Tensor.Builder, com.google.cloud.vertexai.api.TensorOrBuilder>( outputs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); outputs_ = null; } return outputsBuilder_; } private com.google.cloud.vertexai.api.Tensor parameters_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vertexai.api.Tensor, com.google.cloud.vertexai.api.Tensor.Builder, com.google.cloud.vertexai.api.TensorOrBuilder> parametersBuilder_; /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> * * @return Whether the parameters field is set. 
*/ public boolean hasParameters() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> * * @return The parameters. */ public com.google.cloud.vertexai.api.Tensor getParameters() { if (parametersBuilder_ == null) { return parameters_ == null ? com.google.cloud.vertexai.api.Tensor.getDefaultInstance() : parameters_; } else { return parametersBuilder_.getMessage(); } } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ public Builder setParameters(com.google.cloud.vertexai.api.Tensor value) { if (parametersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } parameters_ = value; } else { parametersBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ public Builder setParameters(com.google.cloud.vertexai.api.Tensor.Builder builderForValue) { if (parametersBuilder_ == null) { parameters_ = builderForValue.build(); } else { parametersBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The parameters that govern the prediction. 
* </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ public Builder mergeParameters(com.google.cloud.vertexai.api.Tensor value) { if (parametersBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && parameters_ != null && parameters_ != com.google.cloud.vertexai.api.Tensor.getDefaultInstance()) { getParametersBuilder().mergeFrom(value); } else { parameters_ = value; } } else { parametersBuilder_.mergeFrom(value); } if (parameters_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ public Builder clearParameters() { bitField0_ = (bitField0_ & ~0x00000002); parameters_ = null; if (parametersBuilder_ != null) { parametersBuilder_.dispose(); parametersBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ public com.google.cloud.vertexai.api.Tensor.Builder getParametersBuilder() { bitField0_ |= 0x00000002; onChanged(); return getParametersFieldBuilder().getBuilder(); } /** * * * <pre> * The parameters that govern the prediction. * </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ public com.google.cloud.vertexai.api.TensorOrBuilder getParametersOrBuilder() { if (parametersBuilder_ != null) { return parametersBuilder_.getMessageOrBuilder(); } else { return parameters_ == null ? com.google.cloud.vertexai.api.Tensor.getDefaultInstance() : parameters_; } } /** * * * <pre> * The parameters that govern the prediction. 
* </pre> * * <code>.google.cloud.vertexai.v1.Tensor parameters = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vertexai.api.Tensor, com.google.cloud.vertexai.api.Tensor.Builder, com.google.cloud.vertexai.api.TensorOrBuilder> getParametersFieldBuilder() { if (parametersBuilder_ == null) { parametersBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.vertexai.api.Tensor, com.google.cloud.vertexai.api.Tensor.Builder, com.google.cloud.vertexai.api.TensorOrBuilder>( getParameters(), getParentForChildren(), isClean()); parameters_ = null; } return parametersBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vertexai.v1.DirectPredictResponse) } // @@protoc_insertion_point(class_scope:google.cloud.vertexai.v1.DirectPredictResponse) private static final com.google.cloud.vertexai.api.DirectPredictResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vertexai.api.DirectPredictResponse(); } public static com.google.cloud.vertexai.api.DirectPredictResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DirectPredictResponse> PARSER = new com.google.protobuf.AbstractParser<DirectPredictResponse>() { @java.lang.Override public DirectPredictResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DirectPredictResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DirectPredictResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vertexai.api.DirectPredictResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,820
java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetrics.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/dataflow/v1beta3/metrics.proto // Protobuf Java Version: 3.25.8 package com.google.dataflow.v1beta3; /** * * * <pre> * JobMetrics contains a collection of metrics describing the detailed progress * of a Dataflow job. Metrics correspond to user-defined and system-defined * metrics in the job. For more information, see [Dataflow job metrics] * (https://cloud.google.com/dataflow/docs/guides/using-monitoring-intf). * * This resource captures only the most recent values of each metric; * time-series data can be queried for them (under the same metric names) * from Cloud Monitoring. * </pre> * * Protobuf type {@code google.dataflow.v1beta3.JobMetrics} */ public final class JobMetrics extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.JobMetrics) JobMetricsOrBuilder { private static final long serialVersionUID = 0L; // Use JobMetrics.newBuilder() to construct. 
private JobMetrics(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private JobMetrics() { metrics_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new JobMetrics(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_JobMetrics_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_JobMetrics_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.dataflow.v1beta3.JobMetrics.class, com.google.dataflow.v1beta3.JobMetrics.Builder.class); } private int bitField0_; public static final int METRIC_TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp metricTime_; /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> * * @return Whether the metricTime field is set. */ @java.lang.Override public boolean hasMetricTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> * * @return The metricTime. */ @java.lang.Override public com.google.protobuf.Timestamp getMetricTime() { return metricTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : metricTime_; } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getMetricTimeOrBuilder() { return metricTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : metricTime_; } public static final int METRICS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.dataflow.v1beta3.MetricUpdate> metrics_; /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ @java.lang.Override public java.util.List<com.google.dataflow.v1beta3.MetricUpdate> getMetricsList() { return metrics_; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.dataflow.v1beta3.MetricUpdateOrBuilder> getMetricsOrBuilderList() { return metrics_; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ @java.lang.Override public int getMetricsCount() { return metrics_.size(); } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ @java.lang.Override public com.google.dataflow.v1beta3.MetricUpdate getMetrics(int index) { return metrics_.get(index); } /** * * * <pre> * All metrics for this job. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ @java.lang.Override public com.google.dataflow.v1beta3.MetricUpdateOrBuilder getMetricsOrBuilder(int index) { return metrics_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMetricTime()); } for (int i = 0; i < metrics_.size(); i++) { output.writeMessage(2, metrics_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricTime()); } for (int i = 0; i < metrics_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, metrics_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.dataflow.v1beta3.JobMetrics)) { return super.equals(obj); } com.google.dataflow.v1beta3.JobMetrics other = (com.google.dataflow.v1beta3.JobMetrics) obj; if (hasMetricTime() != other.hasMetricTime()) return false; if (hasMetricTime()) { if (!getMetricTime().equals(other.getMetricTime())) return false; } if (!getMetricsList().equals(other.getMetricsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * 
hash) + getDescriptor().hashCode(); if (hasMetricTime()) { hash = (37 * hash) + METRIC_TIME_FIELD_NUMBER; hash = (53 * hash) + getMetricTime().hashCode(); } if (getMetricsCount() > 0) { hash = (37 * hash) + METRICS_FIELD_NUMBER; hash = (53 * hash) + getMetricsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.dataflow.v1beta3.JobMetrics parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.dataflow.v1beta3.JobMetrics parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.JobMetrics parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.JobMetrics parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.dataflow.v1beta3.JobMetrics prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * JobMetrics contains a collection of metrics describing the detailed progress * of a Dataflow job. 
Metrics correspond to user-defined and system-defined * metrics in the job. For more information, see [Dataflow job metrics] * (https://cloud.google.com/dataflow/docs/guides/using-monitoring-intf). * * This resource captures only the most recent values of each metric; * time-series data can be queried for them (under the same metric names) * from Cloud Monitoring. * </pre> * * Protobuf type {@code google.dataflow.v1beta3.JobMetrics} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.JobMetrics) com.google.dataflow.v1beta3.JobMetricsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_JobMetrics_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_JobMetrics_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.dataflow.v1beta3.JobMetrics.class, com.google.dataflow.v1beta3.JobMetrics.Builder.class); } // Construct using com.google.dataflow.v1beta3.JobMetrics.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMetricTimeFieldBuilder(); getMetricsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; metricTime_ = null; if (metricTimeBuilder_ != null) { metricTimeBuilder_.dispose(); metricTimeBuilder_ = null; } if (metricsBuilder_ == null) { metrics_ = java.util.Collections.emptyList(); } else { metrics_ = null; 
metricsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.dataflow.v1beta3.MetricsProto .internal_static_google_dataflow_v1beta3_JobMetrics_descriptor; } @java.lang.Override public com.google.dataflow.v1beta3.JobMetrics getDefaultInstanceForType() { return com.google.dataflow.v1beta3.JobMetrics.getDefaultInstance(); } @java.lang.Override public com.google.dataflow.v1beta3.JobMetrics build() { com.google.dataflow.v1beta3.JobMetrics result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.dataflow.v1beta3.JobMetrics buildPartial() { com.google.dataflow.v1beta3.JobMetrics result = new com.google.dataflow.v1beta3.JobMetrics(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.dataflow.v1beta3.JobMetrics result) { if (metricsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { metrics_ = java.util.Collections.unmodifiableList(metrics_); bitField0_ = (bitField0_ & ~0x00000002); } result.metrics_ = metrics_; } else { result.metrics_ = metricsBuilder_.build(); } } private void buildPartial0(com.google.dataflow.v1beta3.JobMetrics result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.metricTime_ = metricTimeBuilder_ == null ? 
metricTime_ : metricTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.dataflow.v1beta3.JobMetrics) { return mergeFrom((com.google.dataflow.v1beta3.JobMetrics) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.dataflow.v1beta3.JobMetrics other) { if (other == com.google.dataflow.v1beta3.JobMetrics.getDefaultInstance()) return this; if (other.hasMetricTime()) { mergeMetricTime(other.getMetricTime()); } if (metricsBuilder_ == null) { if (!other.metrics_.isEmpty()) { if (metrics_.isEmpty()) { metrics_ = other.metrics_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureMetricsIsMutable(); metrics_.addAll(other.metrics_); } onChanged(); } } else { if (!other.metrics_.isEmpty()) { if (metricsBuilder_.isEmpty()) { metricsBuilder_.dispose(); metricsBuilder_ = null; metrics_ = other.metrics_; bitField0_ = (bitField0_ & ~0x00000002); metricsBuilder_ = 
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getMetricsFieldBuilder() : null; } else { metricsBuilder_.addAllMessages(other.metrics_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getMetricTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.dataflow.v1beta3.MetricUpdate m = input.readMessage( com.google.dataflow.v1beta3.MetricUpdate.parser(), extensionRegistry); if (metricsBuilder_ == null) { ensureMetricsIsMutable(); metrics_.add(m); } else { metricsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Timestamp metricTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> metricTimeBuilder_; /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> * * @return Whether the metricTime field is set. 
*/ public boolean hasMetricTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> * * @return The metricTime. */ public com.google.protobuf.Timestamp getMetricTime() { if (metricTimeBuilder_ == null) { return metricTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : metricTime_; } else { return metricTimeBuilder_.getMessage(); } } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ public Builder setMetricTime(com.google.protobuf.Timestamp value) { if (metricTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } metricTime_ = value; } else { metricTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ public Builder setMetricTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (metricTimeBuilder_ == null) { metricTime_ = builderForValue.build(); } else { metricTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ public Builder mergeMetricTime(com.google.protobuf.Timestamp value) { if (metricTimeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && metricTime_ != null && metricTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getMetricTimeBuilder().mergeFrom(value); } else { metricTime_ = value; } } else { metricTimeBuilder_.mergeFrom(value); } if (metricTime_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Timestamp as of which metric values are current. 
* </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ public Builder clearMetricTime() { bitField0_ = (bitField0_ & ~0x00000001); metricTime_ = null; if (metricTimeBuilder_ != null) { metricTimeBuilder_.dispose(); metricTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getMetricTimeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMetricTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Timestamp as of which metric values are current. * </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getMetricTimeOrBuilder() { if (metricTimeBuilder_ != null) { return metricTimeBuilder_.getMessageOrBuilder(); } else { return metricTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : metricTime_; } } /** * * * <pre> * Timestamp as of which metric values are current. 
* </pre> * * <code>.google.protobuf.Timestamp metric_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getMetricTimeFieldBuilder() { if (metricTimeBuilder_ == null) { metricTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getMetricTime(), getParentForChildren(), isClean()); metricTime_ = null; } return metricTimeBuilder_; } private java.util.List<com.google.dataflow.v1beta3.MetricUpdate> metrics_ = java.util.Collections.emptyList(); private void ensureMetricsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { metrics_ = new java.util.ArrayList<com.google.dataflow.v1beta3.MetricUpdate>(metrics_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.dataflow.v1beta3.MetricUpdate, com.google.dataflow.v1beta3.MetricUpdate.Builder, com.google.dataflow.v1beta3.MetricUpdateOrBuilder> metricsBuilder_; /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public java.util.List<com.google.dataflow.v1beta3.MetricUpdate> getMetricsList() { if (metricsBuilder_ == null) { return java.util.Collections.unmodifiableList(metrics_); } else { return metricsBuilder_.getMessageList(); } } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public int getMetricsCount() { if (metricsBuilder_ == null) { return metrics_.size(); } else { return metricsBuilder_.getCount(); } } /** * * * <pre> * All metrics for this job. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public com.google.dataflow.v1beta3.MetricUpdate getMetrics(int index) { if (metricsBuilder_ == null) { return metrics_.get(index); } else { return metricsBuilder_.getMessage(index); } } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder setMetrics(int index, com.google.dataflow.v1beta3.MetricUpdate value) { if (metricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsIsMutable(); metrics_.set(index, value); onChanged(); } else { metricsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder setMetrics( int index, com.google.dataflow.v1beta3.MetricUpdate.Builder builderForValue) { if (metricsBuilder_ == null) { ensureMetricsIsMutable(); metrics_.set(index, builderForValue.build()); onChanged(); } else { metricsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder addMetrics(com.google.dataflow.v1beta3.MetricUpdate value) { if (metricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsIsMutable(); metrics_.add(value); onChanged(); } else { metricsBuilder_.addMessage(value); } return this; } /** * * * <pre> * All metrics for this job. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder addMetrics(int index, com.google.dataflow.v1beta3.MetricUpdate value) { if (metricsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsIsMutable(); metrics_.add(index, value); onChanged(); } else { metricsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder addMetrics(com.google.dataflow.v1beta3.MetricUpdate.Builder builderForValue) { if (metricsBuilder_ == null) { ensureMetricsIsMutable(); metrics_.add(builderForValue.build()); onChanged(); } else { metricsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder addMetrics( int index, com.google.dataflow.v1beta3.MetricUpdate.Builder builderForValue) { if (metricsBuilder_ == null) { ensureMetricsIsMutable(); metrics_.add(index, builderForValue.build()); onChanged(); } else { metricsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder addAllMetrics( java.lang.Iterable<? extends com.google.dataflow.v1beta3.MetricUpdate> values) { if (metricsBuilder_ == null) { ensureMetricsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, metrics_); onChanged(); } else { metricsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All metrics for this job. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder clearMetrics() { if (metricsBuilder_ == null) { metrics_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { metricsBuilder_.clear(); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public Builder removeMetrics(int index) { if (metricsBuilder_ == null) { ensureMetricsIsMutable(); metrics_.remove(index); onChanged(); } else { metricsBuilder_.remove(index); } return this; } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public com.google.dataflow.v1beta3.MetricUpdate.Builder getMetricsBuilder(int index) { return getMetricsFieldBuilder().getBuilder(index); } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public com.google.dataflow.v1beta3.MetricUpdateOrBuilder getMetricsOrBuilder(int index) { if (metricsBuilder_ == null) { return metrics_.get(index); } else { return metricsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public java.util.List<? extends com.google.dataflow.v1beta3.MetricUpdateOrBuilder> getMetricsOrBuilderList() { if (metricsBuilder_ != null) { return metricsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(metrics_); } } /** * * * <pre> * All metrics for this job. 
* </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public com.google.dataflow.v1beta3.MetricUpdate.Builder addMetricsBuilder() { return getMetricsFieldBuilder() .addBuilder(com.google.dataflow.v1beta3.MetricUpdate.getDefaultInstance()); } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public com.google.dataflow.v1beta3.MetricUpdate.Builder addMetricsBuilder(int index) { return getMetricsFieldBuilder() .addBuilder(index, com.google.dataflow.v1beta3.MetricUpdate.getDefaultInstance()); } /** * * * <pre> * All metrics for this job. * </pre> * * <code>repeated .google.dataflow.v1beta3.MetricUpdate metrics = 2;</code> */ public java.util.List<com.google.dataflow.v1beta3.MetricUpdate.Builder> getMetricsBuilderList() { return getMetricsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.dataflow.v1beta3.MetricUpdate, com.google.dataflow.v1beta3.MetricUpdate.Builder, com.google.dataflow.v1beta3.MetricUpdateOrBuilder> getMetricsFieldBuilder() { if (metricsBuilder_ == null) { metricsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.dataflow.v1beta3.MetricUpdate, com.google.dataflow.v1beta3.MetricUpdate.Builder, com.google.dataflow.v1beta3.MetricUpdateOrBuilder>( metrics_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); metrics_ = null; } return metricsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.JobMetrics) } // @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.JobMetrics) private static final 
com.google.dataflow.v1beta3.JobMetrics DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.JobMetrics(); } public static com.google.dataflow.v1beta3.JobMetrics getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<JobMetrics> PARSER = new com.google.protobuf.AbstractParser<JobMetrics>() { @java.lang.Override public JobMetrics parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<JobMetrics> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<JobMetrics> getParserForType() { return PARSER; } @java.lang.Override public com.google.dataflow.v1beta3.JobMetrics getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): The following three lines are dataset-extraction artifacts — the repo id,
// file size, and file path of the next concatenated source file. They are not Java source
// and are commented out so they cannot be parsed as stray tokens. The content above this
// point belongs to protobuf-generated com.google.dataflow.v1beta3.JobMetrics; the content
// below belongs to org.apache.druid.math.expr.VectorExprResultConsistencyTest.
// apache/druid
// 37,245
// processing/src/test/java/org/apache/druid/math/expr/VectorExprResultConsistencyTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.math.expr; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; import org.apache.druid.guice.DruidGuiceExtensions; import org.apache.druid.guice.ExpressionModule; import org.apache.druid.guice.annotations.Json; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Either; import org.apache.druid.java.util.common.NonnullPair; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.math.expr.vector.ExprEvalVector; import org.apache.druid.math.expr.vector.ExprVectorProcessor; import org.apache.druid.query.expression.LookupExprMacro; import org.apache.druid.query.lookup.LookupExtractorFactoryContainer; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; import org.apache.druid.query.lookup.TestMapLookupExtractorFactory; import org.apache.druid.testing.InitializedNullHandlingTest; import org.junit.Assert; import org.junit.Test; import 
org.junit.jupiter.api.Assertions; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ThreadLocalRandom; import java.util.function.BooleanSupplier; import java.util.function.DoubleSupplier; import java.util.function.LongSupplier; import java.util.function.Supplier; /** * randomize inputs to various vector expressions and make sure the results match nonvectorized expressions * * this is not a replacement for correctness tests, but will ensure that vectorized and non-vectorized expression * evaluation is at least self-consistent... */ public class VectorExprResultConsistencyTest extends InitializedNullHandlingTest { private static final Logger log = new Logger(VectorExprResultConsistencyTest.class); private static final int NUM_ITERATIONS = 10; private static final int VECTOR_SIZE = 4; private static final Map<String, String> LOOKUP = Map.of( "1", "a", "12", "b", "33", "c", "111", "d", "123", "e", "124", "f" ); private static final Map<String, String> INJECTIVE_LOOKUP = new HashMap<>() { @Override public String get(Object key) { return (String) key; } }; private static final ExprMacroTable MACRO_TABLE; static { final Injector injector = Guice.createInjector( new DruidGuiceExtensions(), new ExpressionModule(), binder -> { final LookupExtractorFactoryContainerProvider lookupProvider = new LookupExtractorFactoryContainerProvider() { @Override public Set<String> getAllLookupNames() { return Set.of("test-lookup", "test-lookup-injective"); } @Override public Optional<LookupExtractorFactoryContainer> get(String lookupName) { if ("test-lookup".equals(lookupName)) { return Optional.of( new LookupExtractorFactoryContainer( "v0", new TestMapLookupExtractorFactory(LOOKUP, false) ) ); } else if ("test-lookup-injective".equals(lookupName)) { return Optional.of( new LookupExtractorFactoryContainer( "v0", new 
TestMapLookupExtractorFactory(INJECTIVE_LOOKUP, true) ) ); } return Optional.empty(); } @Override public String getCanonicalLookupName(String lookupName) { return ""; } }; ExpressionModule.addExprMacro(binder, LookupExprMacro.class); binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(new DefaultObjectMapper()); binder.bind(LookupExtractorFactoryContainerProvider.class) .toInstance(lookupProvider); } ); MACRO_TABLE = injector.getInstance(ExprMacroTable.class); } final Map<String, ExpressionType> types = ImmutableMap.<String, ExpressionType>builder() .put("l1", ExpressionType.LONG) .put("l2", ExpressionType.LONG) .put("l3", ExpressionType.LONG) .put("d1", ExpressionType.DOUBLE) .put("d2", ExpressionType.DOUBLE) .put("d3", ExpressionType.DOUBLE) .put("s1", ExpressionType.STRING) .put("s2", ExpressionType.STRING) .put("s3", ExpressionType.STRING) .put("boolString1", ExpressionType.STRING) .put("boolString2", ExpressionType.STRING) .put("boolString3", ExpressionType.STRING) .build(); @Test public void testConstants() { testExpression("null", types); testExpression("1", types); testExpression("1.1", types); testExpression("NaN", types); testExpression("Infinity", types); testExpression("-Infinity", types); testExpression("'hello'", types); testExpression("json_object('a', 1, 'b', 'abc', 'c', 3.3, 'd', array(1,2,3))", types); } @Test public void testIdentifiers() { final List<String> columns = new ArrayList<>(types.keySet()); columns.add("unknown"); final List<String> template = List.of("%s"); testFunctions(types, template, columns); } @Test public void testCast() { final Set<String> columns = Set.of("d1", "l1", "s1"); final Set<String> castTo = Set.of( "'STRING'", "'LONG'", "'DOUBLE'", "'ARRAY<STRING>'", "'ARRAY<LONG>'", "'ARRAY<DOUBLE>'" ); final Set<List<String>> args = Sets.cartesianProduct(columns, castTo); final List<String> templates = List.of("cast(%s, %s)"); testFunctions(types, templates, args); } @Test public void testCastArraysRoundTrip() { 
testExpression("cast(cast(s1, 'ARRAY<STRING>'), 'STRING')", types); testExpression("cast(cast(d1, 'ARRAY<DOUBLE>'), 'DOUBLE')", types); testExpression("cast(cast(d1, 'ARRAY<STRING>'), 'DOUBLE')", types); testExpression("cast(cast(l1, 'ARRAY<LONG>'), 'LONG')", types); testExpression("cast(cast(l1, 'ARRAY<STRING>'), 'LONG')", types); } @Test public void testUnaryOperators() { final List<String> functions = List.of("-"); final List<String> templates = List.of("%sd1", "%sl1"); testFunctions(types, templates, functions); } @Test public void testBinaryMathOperators() { final Set<String> columns = Set.of("d1", "d2", "l1", "l2", "1", "1.0", "nonexistent", "null", "s1"); final Set<String> columns2 = Set.of("d1", "d2", "l1", "l2", "1", "1.0"); final Set<List<String>> templateInputs = Sets.cartesianProduct(columns, columns2); final List<String> templates = new ArrayList<>(); for (List<String> template : templateInputs) { templates.add(StringUtils.format("%s %s %s", template.get(0), "%s", template.get(1))); } final List<String> args = List.of("+", "-", "*", "/", "^", "%"); testFunctions(types, templates, args); } @Test public void testBinaryComparisonOperators() { final Set<String> columns = Set.of("d1", "d2", "l1", "l2", "1", "1.0", "s1", "s2", "nonexistent", "null"); final Set<String> columns2 = Set.of("d1", "d2", "l1", "l2", "1", "1.0", "s1", "s2", "null"); final Set<List<String>> templateInputs = Sets.cartesianProduct(columns, columns2); final List<String> templates = new ArrayList<>(); for (List<String> template : templateInputs) { templates.add(StringUtils.format("%s %s %s", template.get(0), "%s", template.get(1))); } final List<String> args = List.of(">", ">=", "<", "<=", "==", "!="); testFunctions(types, templates, args); } @Test public void testUnaryLogicOperators() { final List<String> functions = List.of("!"); final List<String> templates = List.of("%sd1", "%sl1", "%sboolString1"); testFunctions(types, templates, functions); } @Test public void 
testBinaryLogicOperators() { final List<String> functions = List.of("&&", "||"); final List<String> templates = List.of( "d1 %s d2", "l1 %s l2", "boolString1 %s boolString2", "(d1 == d2) %s (l1 == l2)" ); testFunctions(types, templates, functions); } @Test public void testBinaryOperatorTrees() { final Set<String> columns = Set.of("d1", "l1", "1", "1.0", "nonexistent", "null"); final Set<String> columns2 = Set.of("d2", "l2", "2", "2.0"); final Set<List<String>> templateInputs = Sets.cartesianProduct(columns, columns2, columns2); final List<String> templates = new ArrayList<>(); for (List<String> template : templateInputs) { templates.add( StringUtils.format( "(%s %s %s) %s %s", template.get(0), "%s", template.get(1), "%s", template.get(2) ) ); } final Set<String> ops = Set.of("+", "-", "*", "/"); final Set<List<String>> args = Sets.cartesianProduct(ops, ops); testFunctions(types, templates, args); } @Test public void testUnivariateFunctions() { final List<String> functions = List.of("parse_long", "isNull", "notNull"); final List<String> templates = List.of("%s(s1)", "%s(l1)", "%s(d1)", "%s(nonexistent)", "%s(null)"); testFunctions(types, templates, functions); } @Test public void testLike() { testExpression("like(s1, '1%')", types); testExpression("like(s1, '%1')", types); testExpression("like(s1, '%1%')", types); } @Test public void testUnivariateMathFunctions() { final List<String> functions = List.of( "abs", "acos", "asin", "atan", "cbrt", "ceil", "cos", "cosh", "cot", "exp", "expm1", "floor", "getExponent", "log", "log10", "log1p", "nextUp", "rint", "signum", "sin", "sinh", "sqrt", "tan", "tanh", "toDegrees", "toRadians", "ulp", "bitwiseComplement", "bitwiseConvertDoubleToLongBits", "bitwiseConvertLongBitsToDouble" ); final List<String> templates = List.of("%s(l1)", "%s(d1)", "%s(pi())", "%s(null)", "%s(missing)"); testFunctions(types, templates, functions); } @Test public void testBivariateMathFunctions() { final List<String> functions = List.of( "atan2", 
"copySign", "div", "hypot", "remainder", "max", "min", "nextAfter", "scalb", "pow", "safe_divide", "bitwiseAnd", "bitwiseOr", "bitwiseXor", "bitwiseShiftLeft", "bitwiseShiftRight" ); final List<String> templates = List.of( "%s(d1, d2)", "%s(d1, l1)", "%s(l1, d1)", "%s(l1, l2)", "%s(nonexistent, l1)", "%s(nonexistent, d1)" ); testFunctions(types, templates, functions); } @Test public void testSymmetricalBivariateFunctions() { final List<String> functions = List.of( "nvl", "coalesce" ); final List<String> templates = List.of( "%s(d1, d2)", "%s(l1, l2)", "%s(s1, s2)", "%s(nonexistent, l1)", "%s(nonexistent, d1)", "%s(nonexistent, s1)", "%s(nonexistent, nonexistent2)" ); testFunctions(types, templates, functions); } @Test public void testIfFunction() { testExpression("if(l1, l1, l2)", types); testExpression("if(l1, s1, s2)", types); testExpression("if(l1, d1, d2)", types); testExpression("if(d1, l1, l2)", types); testExpression("if(d1, s1, s2)", types); testExpression("if(d1, d1, d2)", types); testExpression("if(boolString1, s1, s2)", types); testExpression("if(boolString1, l1, l2)", types); testExpression("if(boolString1, d1, d2)", types); // make sure eval of else is lazy, else this would be divide by zero error testExpression("if(l1 % 2 == 0, -1, l2 / (l1 % 2))", types); // cannot vectorize mixed types Assertions.assertFalse( Parser.parse("if(s1, l1, d2)", MACRO_TABLE).canVectorize(InputBindings.inspectorFromTypeMap(types)) ); Assertions.assertFalse( Parser.parse("if(s1, d1, s2)", MACRO_TABLE).canVectorize(InputBindings.inspectorFromTypeMap(types)) ); } @Test public void testCaseSearchedFunction() { testExpression("case_searched(boolString1, s1, boolString2, s2, s1)", types); testExpression("case_searched(boolString1, s1, boolString2, s2, boolString3, s3, s1)", types); testExpression("case_searched(boolString1, l1, boolString2, l2, l2)", types); testExpression("case_searched(boolString1, l1, boolString2, l2, boolString3, l3, l2)", types); 
testExpression("case_searched(boolString1, d1, boolString2, d2, d1)", types); testExpression("case_searched(boolString1, d1, boolString2, d2, boolString3, d3, d1)", types); testExpression("case_searched(l1 % 2 == 0, -1, l1 % 2 == 1, l2 / (l1 % 2))", types); Assertions.assertFalse( Parser.parse("case_searched(boolString1, d1, boolString2, d2, l1)", MACRO_TABLE) .canVectorize(InputBindings.inspectorFromTypeMap(types)) ); Assertions.assertFalse( Parser.parse("case_searched(boolString1, d1, boolString2, l1, d1)", MACRO_TABLE) .canVectorize(InputBindings.inspectorFromTypeMap(types)) ); } @Test public void testCaseSimpleFunction() { testExpression("case_simple(s1, s2, s2, s1, s2)", types); testExpression("case_simple(s1, s2, s2, s1, s2, s1)", types); testExpression("case_simple(s1, s2, l1, s1, l2)", types); testExpression("case_simple(s1, s2, d1, s1, d2, d1)", types); testExpression("case_simple(s1, l1, d1, d1, d2, d1)", types); Assertions.assertFalse( Parser.parse("case_simple(s1, d1, s1, l1, d1)", MACRO_TABLE) .canVectorize(InputBindings.inspectorFromTypeMap(types)) ); } @Test public void testCoalesceFunction() { final List<String> functions = List.of( "coalesce" ); final List<String> templates = List.of( "%s(nonexistent, d1, d2, d3)", "%s(nonexistent, d1, nonexistent2, d2, nonexistent3, d3)", "%s(nonexistent, nonexistent2, l1, l2, nonexistent, l3)", "%s(nonexistent, s1, nonexistent2, s2)" ); testFunctions(types, templates, functions); // cannot vectorize mixed arg types Assertions.assertFalse( Parser.parse("coalesce(s1, d1, s1, l1, d1)", MACRO_TABLE) .canVectorize(InputBindings.inspectorFromTypeMap(types)) ); } @Test public void testStringFns() { testExpression("s1 + s2", types); testExpression("s1 + '-' + s2", types); testExpression("concat(s1, s2)", types); testExpression("concat(s1,'-',s2,'-',l1,'-',d1)", types); } @Test public void testStringFunctions() { testExpression("format('%s-%d-%.2f', s1, l1, d1)", types); testExpression("format('%s %s', s1, s2)", types); 
testExpression("regexp_extract(s1, '[0-9]+')", types); testExpression("regexp_extract(s1, '([a-z]+)', 1)", types); testExpression("regexp_like(s1, '[0-9]+')", types); testExpression("regexp_like(s1, '^test.*')", types); testExpression("regexp_replace(s1, '[0-9]+', 'NUM')", types); testExpression("contains_string(s1, '1')", types); testExpression("icontains_string(s1, '1')", types); testExpression("replace(s1, 'test', 'TEST')", types); testExpression("replace(s1, s2, s3)", types); testExpression("substring(s1, 0, 3)", types); testExpression("substring(s1, l1, l2)", types); testExpression("right(s1, 3)", types); testExpression("right(s1, l1)", types); testExpression("left(s1, 3)", types); testExpression("left(s1, l1)", types); testExpression("strlen(s1)", types); testExpression("strpos(s1, 'test')", types); testExpression("strpos(s1, s2)", types); testExpression("strpos(s1, s2, l1)", types); testExpression("trim(s1)", types); testExpression("trim(s1, 'abc')", types); testExpression("ltrim(s1)", types); testExpression("ltrim(s1, 'abc')", types); testExpression("rtrim(s1)", types); testExpression("rtrim(s1, 'abc')", types); testExpression("lower(s1)", types); testExpression("upper(s1)", types); testExpression("reverse(s1)", types); testExpression("repeat(s1, 3)", types); testExpression("repeat(s1, l1 % 10)", types); testExpression("lpad(s1, 10, 'x')", types); testExpression("lpad(s1, l1 % 10, s2)", types); testExpression("rpad(s1, 10, 'x')", types); testExpression("rpad(s1, l1 % 10, s2)", types); } @Test public void testLookup() { final List<String> columns = new ArrayList<>(types.keySet()); columns.add("unknown"); final List<String> templates = List.of( "lookup(%s, 'test-lookup')", "lookup(%s, 'test-lookup', 'missing')", "lookup(%s, 'test-lookup-injective')", "lookup(%s, 'test-lookup-injective', 'missing')", "lookup(%s, 'nonexistent-lookup')", "lookup(%s, 'nonexistent-lookup', 'missing')" ); testFunctions(types, templates, columns); } @Test public void 
testArrayFunctions()
  {
    // array construction from homogeneous and mixed-type columns
    testExpression("array(s1, s2)", types);
    testExpression("array(l1, l2)", types);
    testExpression("array(d1, d2)", types);
    testExpression("array(l1, d2)", types);
    testExpression("array(s1, l2)", types);
    // element access, with constant and column-driven indexes
    testExpression("array_length(array(s1, s2))", types);
    testExpression("array_length(array(l1, l2, l3))", types);
    testExpression("array_offset(array(s1, s2), 0)", types);
    testExpression("array_offset(array(l1, l2), l1)", types);
    testExpression("array_ordinal(array(s1, s2), 1)", types);
    testExpression("array_ordinal(array(l1, l2), l1)", types);
    // membership / overlap
    testExpression("array_contains(array(s1, s2), s1)", types);
    testExpression("array_contains(array(l1, l2), l1)", types);
    testExpression("array_contains(array(s1, s2), array(s1))", types);
    testExpression("array_overlap(array(s1, s2), array(s2, s3))", types);
    testExpression("array_overlap(array(l1, l2), array(l2, l3))", types);
    // scalar_in_array, including cases where element type differs from the scalar type
    testExpression("scalar_in_array(s1, array(s1, s2))", types);
    testExpression("scalar_in_array(s1, array('1', '2'))", types);
    testExpression("scalar_in_array(s1, array(1, 2))", types);
    testExpression("scalar_in_array(l1, array(l1, l2))", types);
    testExpression("scalar_in_array(l1, array('1', '2'))", types);
    testExpression("scalar_in_array(l1, array(1, 2))", types);
    testExpression("array_offset_of(array(s1, s2), s1)", types);
    testExpression("array_offset_of(array(l1, l2), l1)", types);
    testExpression("array_ordinal_of(array(s1, s2), s1)", types);
    testExpression("array_ordinal_of(array(l1, l2), l1)", types);
    // mutation-style operators (produce new arrays)
    testExpression("array_prepend(s1, array(s2, s3))", types);
    testExpression("array_prepend(l1, array(l2, l3))", types);
    testExpression("array_append(array(s1, s2), s3)", types);
    testExpression("array_append(array(l1, l2), l3)", types);
    testExpression("array_concat(array(s1, s2), array(s2, s3))", types);
    testExpression("array_concat(array(l1, l2), array(l2, l3))", types);
    testExpression("array_set_add(array(s1, s2), s1)", types);
    testExpression("array_set_add(array(l1, l2), l3)", types);
    testExpression("array_set_add_all(array(s1, s2), array(s2, s3))", types);
    testExpression("array_set_add_all(array(l1, l2), array(l2, l3))", types);
    testExpression("array_slice(array(s1, s2, s3), 1, 2)", types);
    testExpression("array_slice(array(l1, l2, l3), 1, 2)", types);
    // array <-> string conversion
    testExpression("array_to_string(array(s1, s2), ',')", types);
    testExpression("array_to_string(array(l1, l2), s1)", types);
    testExpression("string_to_array(s1, ',')", types);
    testExpression("string_to_array(s1, s2)", types);
  }

  @Test
  public void testArrayHigherOrderFunctions()
  {
    // map / cartesian_map with lambdas over long, double, and string arrays
    testExpression("map((x) -> x + 1, array(l1, l2))", types);
    testExpression("map((x) -> x * 2.0, array(d1, d2))", types);
    testExpression("map((x) -> concat(x, '_mapped'), array(s1, s2))", types);
    testExpression("cartesian_map((x, y) -> x + y, array(l1, l2), array(d1, d2))", types);
    testExpression("cartesian_map((x, y) -> concat(x, cast(y, 'STRING')), array(s1, s2), array(l1, l2))", types);
    // fold / cartesian_fold with an accumulator
    testExpression("fold((x, acc) -> x + acc, array(l1, l2), 0)", types);
    testExpression("fold((x, acc) -> x + acc, array(d1, d2), 0.0)", types);
    testExpression("fold((x, acc) -> concat(acc, x), array(s1, s2), '')", types);
    testExpression("cartesian_fold((x, y, acc) -> acc + x + y, array(l1, l2), array(d1, d2), 0)", types);
    // filter / any / all predicates
    testExpression("filter((x) -> x > 0, array(l1, l2))", types);
    testExpression("filter((x) -> x > 0.0, array(d1, d2))", types);
    testExpression("filter((x) -> strlen(x) > 0, array(s1, s2))", types);
    testExpression("any((x) -> x > 0, array(l1, l2))", types);
    testExpression("any((x) -> x > 0.0, array(d1, d2))", types);
    testExpression("any((x) -> strlen(x) > 0, array(s1, s2))", types);
    testExpression("all((x) -> x != null, array(l1, l2))", types);
    testExpression("all((x) -> x != null, array(d1, d2))", types);
    testExpression("all((x) -> x != null, array(s1, s2))", types);
  }

  @Test
  public void testReduceFns()
  {
    // greatest/least across same-type, mixed-type, and missing-column arguments
    testExpression("greatest(s1, s2)", types);
    testExpression("greatest(l1, l2)", types);
    testExpression("greatest(l1, nonexistent)", types);
    testExpression("greatest(d1, d2)", types);
    testExpression("greatest(l1, d2)", types);
    testExpression("greatest(s1, l2)", types);
    testExpression("least(s1, s2)", types);
    testExpression("least(l1, l2)", types);
    testExpression("least(l1, nonexistent)", types);
    testExpression("least(d1, d2)", types);
    testExpression("least(l1, d2)", types);
    testExpression("least(s1, l2)", types);
  }

  @Test
  public void testJsonFns()
  {
    // JSON construction, extraction, parsing, and introspection
    testExpression("json_object('k1', s1, 'k2', l1)", types);
    testExpression("json_value(json_object('k1', s1, 'k2', l1), '$.k2', 'STRING')", types);
    testExpression("json_query(json_object('k1', s1, 'k2', l1), '$.k1')", types);
    testExpression("json_query_array(json_object('arr', array(s1, s2)), '$.arr')", types);
    testExpression("parse_json(s1)", types);
    testExpression("try_parse_json(s1)", types);
    testExpression("to_json_string(json_object('k1', s1))", types);
    testExpression("json_keys(json_object('k1', s1, 'k2', l1), '$')", types);
    testExpression("json_paths(json_object('k1', s1, 'k2', l1))", types);
    testExpression("json_merge(json_object('k1', s1), json_object('k2', l1))", types);
  }

  @Test
  public void testTimeFunctions()
  {
    // ceil/floor on period granularities
    testExpression("timestamp_ceil(l1, 'P1D')", types);
    testExpression("timestamp_ceil(l1, 'PT1H')", types);
    testExpression("timestamp_floor(l1, 'P1D')", types);
    testExpression("timestamp_floor(l1, 'PT1H')", types);
    // every extraction unit
    testExpression("timestamp_extract(l1, 'MILLENNIUM')", types);
    testExpression("timestamp_extract(l1, 'CENTURY')", types);
    testExpression("timestamp_extract(l1, 'YEAR')", types);
    testExpression("timestamp_extract(l1, 'MONTH')", types);
    testExpression("timestamp_extract(l1, 'DAY')", types);
    testExpression("timestamp_extract(l1, 'HOUR')", types);
    testExpression("timestamp_extract(l1, 'MINUTE')", types);
    testExpression("timestamp_extract(l1, 'SECOND')", types);
    testExpression("timestamp_extract(l1, 'MILLISECOND')", types);
    testExpression("timestamp_extract(l1, 'EPOCH')", types);
    testExpression("timestamp_shift(l1, 'P1M', 1)", types);
    // string <-> timestamp conversion, with and without explicit format/zone
    testExpression("timestamp(s1)", types);
    testExpression("timestamp(s1, 'yyyy-MM-dd')", types);
    testExpression("unix_timestamp(s1)", types);
    testExpression("unix_timestamp(s1, 'yyyy-MM-dd')", types);
    testExpression("timestamp_parse(s1)", types);
    testExpression("timestamp_parse(s1, 'yyyy-MM-dd')", types);
    testExpression("timestamp_parse(s1, 'yyyy-MM-dd', 'UTC')", types);
    testExpression("timestamp_format(l1)", types);
    testExpression("timestamp_format(l1, 'yyyy-MM-dd')", types);
    testExpression("timestamp_format(l1, 'yyyy-MM-dd', 'UTC')", types);
  }

  @Test
  public void testIpAddressFunctions()
  {
    // IPv4/IPv6 matching, parsing, stringification with constant and column inputs
    testExpression("ipv4_match('192.168.1.1', '192.168.0.0/16')", types);
    testExpression("ipv4_match(s1, '192.168.0.0/16')", types);
    testExpression("ipv4_parse('192.168.1.1')", types);
    testExpression("ipv4_parse(s1)", types);
    testExpression("ipv4_stringify(3232235777)", types);
    testExpression("ipv4_stringify(l1)", types);
    testExpression("ipv6_match('2001:db8::1', '2001:db8::/32')", types);
    testExpression("ipv6_match(s1, '2001:db8::/32')", types);
  }

  @Test
  public void testOtherFunctions()
  {
    testExpression("human_readable_binary_byte_format(l1)", types);
    testExpression("human_readable_binary_byte_format(l1, 3)", types);
    testExpression("human_readable_decimal_byte_format(l1)", types);
    testExpression("human_readable_decimal_byte_format(l1, 3)", types);
    testExpression("human_readable_decimal_format(l1)", types);
    testExpression("human_readable_decimal_format(l1, 3)", types);
    testExpression("pi()", types);
  }

  /**
   * Applies a list of arguments to a list of templates to fill in each template from the argument to generate
   * expression strings and call {@link #testExpression} for each combination
   */
  static void testFunctions(Map<String, ExpressionType> types, List<String> templates, List<String> args)
  {
    for (String template : templates) {
      for (String arg : args) {
        String expr = StringUtils.format(template, arg);
        testExpression(expr, types);
      }
    }
  }

  /**
   * Applies a set of argument
   * lists to a list of string templates to fill in each template from the argument list to
   * generate expression strings and call {@link #testExpression} for each combination
   */
  static void testFunctions(Map<String, ExpressionType> types, List<String> templates, Set<List<String>> argsArrays)
  {
    for (String template : templates) {
      for (List<String> args : argsArrays) {
        String expr = StringUtils.format(template, args.toArray());
        testExpression(expr, types);
      }
    }
  }

  /**
   * run an expression both vectorized and non-vectorized, comparing results with randomized bindings and sequential
   * bindings
   */
  public static void testExpression(String expr, Map<String, ExpressionType> types)
  {
    testExpression(expr, types, MACRO_TABLE);
  }

  /**
   * run an expression both vectorized and non-vectorized, comparing results with randomized bindings and sequential
   * bindings
   */
  public static void testExpression(String expr, Map<String, ExpressionType> types, ExprMacroTable macroTable)
  {
    log.debug("running expression [%s]", expr);
    Expr parsed = Parser.parse(expr, macroTable);
    testExpressionRandomizedBindings(expr, parsed, types, NUM_ITERATIONS);
    testExpressionSequentialBindings(expr, parsed, types, NUM_ITERATIONS);
  }

  /**
   * runs expressions against bindings generated by increasing a counter
   */
  public static void testExpressionSequentialBindings(
      final String expr,
      final Expr parsed,
      final Map<String, ExpressionType> types,
      final int numIterations
  )
  {
    for (int iter = 0; iter < numIterations; iter++) {
      assertEvalsMatch(
          expr,
          parsed,
          makeSequentialBinding(
              VECTOR_SIZE,
              types,
              -2 + (iter * VECTOR_SIZE) // include negative numbers and zero
          )
      );
    }
  }

  /**
   * runs expressions against freshly randomized bindings for each iteration
   */
  public static void testExpressionRandomizedBindings(
      final String expr,
      final Expr parsed,
      final Map<String, ExpressionType> types,
      final int numIterations
  )
  {
    for (int iterations = 0; iterations < numIterations; iterations++) {
      assertEvalsMatch(expr, parsed, makeRandomizedBindings(VECTOR_SIZE, types));
    }
  }

  /**
   * Evaluates the expression with both the vectorized and non-vectorized engines against the same bindings, and
   * asserts that errors and per-row values agree.
   */
  public static void assertEvalsMatch(
      String exprString,
      Expr expr,
      NonnullPair<Expr.ObjectBinding[], Expr.VectorInputBinding> bindings
  )
  {
    Assert.assertTrue(StringUtils.format("Cannot vectorize[%s]", expr), expr.canVectorize(bindings.rhs));
    final ExpressionType outputType = expr.getOutputType(bindings.rhs);
    final Either<String, Object[]> vectorEval = evalVector(expr, bindings.rhs, outputType);
    final Either<String, Object[]> nonVectorEval = evalNonVector(expr, bindings.lhs, outputType);
    // both paths must fail identically, or both succeed
    Assert.assertEquals(
        StringUtils.format("Errors do not match for expr[%s], bindings[%s]", exprString, bindings.lhs),
        nonVectorEval.isError() ? nonVectorEval.error() : "",
        vectorEval.isError() ? vectorEval.error() : ""
    );
    if (vectorEval.isValue() && nonVectorEval.isValue()) {
      for (int i = 0; i < VECTOR_SIZE; i++) {
        final String message = StringUtils.format(
            "Values do not match for row[%s] for expression[%s], bindings[%s]",
            i,
            exprString,
            bindings.lhs[i]
        );
        if (outputType != null && outputType.isArray()) {
          // arrays need element-wise comparison
          Assert.assertArrayEquals(
              message,
              (Object[]) nonVectorEval.valueOrThrow()[i],
              (Object[]) vectorEval.valueOrThrow()[i]
          );
        } else {
          Assert.assertEquals(
              message,
              nonVectorEval.valueOrThrow()[i],
              vectorEval.valueOrThrow()[i]
          );
        }
      }
    }
  }

  /**
   * Random backed bindings that provide positive longs in the positive integer range, random doubles, and random
   * strings, with a 10% null rate
   */
  public static NonnullPair<Expr.ObjectBinding[], Expr.VectorInputBinding> makeRandomizedBindings(
      int vectorSize,
      Map<String, ExpressionType> types
  )
  {
    final ThreadLocalRandom r = ThreadLocalRandom.current();
    return populateBindings(
        vectorSize,
        types,
        () -> r.nextLong(Integer.MIN_VALUE, Integer.MAX_VALUE),
        () -> r.nextDouble() * (r.nextBoolean() ?
10 : -10), () -> r.nextDouble(0, 1.0) > 0.9, () -> String.valueOf(r.nextInt()) ); } /** * expression bindings that use a counter to generate double, long, and string values with a random null rate */ public static NonnullPair<Expr.ObjectBinding[], Expr.VectorInputBinding> makeSequentialBinding( int vectorSize, Map<String, ExpressionType> types, int start ) { return populateBindings( vectorSize, types, new LongSupplier() { int counter = start; @Override public long getAsLong() { return counter++; } }, new DoubleSupplier() { int counter = start; @Override public double getAsDouble() { return counter++; } }, () -> ThreadLocalRandom.current().nextBoolean(), new Supplier<>() { int counter = start; @Override public String get() { return String.valueOf(counter++); } } ); } /** * Populates a vectors worth of expression bindings */ static NonnullPair<Expr.ObjectBinding[], Expr.VectorInputBinding> populateBindings( int vectorSize, Map<String, ExpressionType> types, LongSupplier longsFn, DoubleSupplier doublesFn, BooleanSupplier nullsFn, Supplier<String> stringFn ) { SettableVectorInputBinding vectorBinding = new SettableVectorInputBinding(vectorSize); SettableObjectBinding[] objectBindings = new SettableObjectBinding[vectorSize]; Expr.InputBindingInspector inspector = InputBindings.inspectorFromTypeMap(types); for (Map.Entry<String, ExpressionType> entry : types.entrySet()) { boolean[] nulls = new boolean[vectorSize]; switch (entry.getValue().getType()) { case LONG: long[] longs = new long[vectorSize]; for (int i = 0; i < vectorSize; i++) { nulls[i] = nullsFn.getAsBoolean(); longs[i] = nulls[i] ? 0L : longsFn.getAsLong(); if (objectBindings[i] == null) { objectBindings[i] = new SettableObjectBinding().withInspector(inspector); } objectBindings[i].withBinding(entry.getKey(), nulls[i] ? 
null : longs[i]); } vectorBinding.addLong(entry.getKey(), longs, nulls); break; case DOUBLE: double[] doubles = new double[vectorSize]; for (int i = 0; i < vectorSize; i++) { nulls[i] = nullsFn.getAsBoolean(); doubles[i] = nulls[i] ? 0.0 : doublesFn.getAsDouble(); if (objectBindings[i] == null) { objectBindings[i] = new SettableObjectBinding().withInspector(inspector); } objectBindings[i].withBinding(entry.getKey(), nulls[i] ? null : doubles[i]); } vectorBinding.addDouble(entry.getKey(), doubles, nulls); break; case STRING: String[] strings = new String[vectorSize]; for (int i = 0; i < vectorSize; i++) { nulls[i] = nullsFn.getAsBoolean(); if (!nulls[i] && entry.getKey().startsWith("boolString")) { strings[i] = String.valueOf(nullsFn.getAsBoolean()); } else { strings[i] = nulls[i] ? null : String.valueOf(stringFn.get()); } if (objectBindings[i] == null) { objectBindings[i] = new SettableObjectBinding().withInspector(inspector); } objectBindings[i].withBinding(entry.getKey(), nulls[i] ? null : strings[i]); } vectorBinding.addString(entry.getKey(), strings); break; } } return new NonnullPair<>(objectBindings, vectorBinding); } private static Either<String, Object[]> evalVector( Expr expr, Expr.VectorInputBinding bindings, @Nullable ExpressionType outputType ) { final ExprVectorProcessor<Object> processor = expr.asVectorProcessor(bindings); final ExprEvalVector<?> vectorEval; try { vectorEval = processor.evalVector(bindings); } catch (ArithmeticException e) { // After a few occasions, Java starts throwing ArithmeticException without a message for division by zero. 
return Either.error(e.getClass().getName()); } catch (Exception e) { return Either.error(e.toString()); } final Object[] vectorVals = vectorEval.getObjectVector(); // if outputType is known, verify the expr returns the correct type if (outputType != null) { Assert.assertEquals("vector eval type", outputType, vectorEval.getType()); } return Either.value(vectorVals); } private static Either<String, Object[]> evalNonVector( Expr expr, Expr.ObjectBinding[] bindings, @Nullable ExpressionType outputType ) { final Object[] exprValues = new Object[VECTOR_SIZE]; for (int i = 0; i < VECTOR_SIZE; i++) { ExprEval<?> eval; try { eval = expr.eval(bindings[i]); } catch (ArithmeticException e) { // After a few occasions, Java starts throwing ArithmeticException without a message for division by zero. return Either.error(e.getClass().getName()); } catch (Exception e) { return Either.error(e.toString()); } // if outputType is known, verify the expr returns the correct type if (outputType != null && eval.value() != null) { Assert.assertEquals("nonvector eval type", eval.type(), outputType); } exprValues[i] = eval.value(); } return Either.value(exprValues); } }
googleapis/google-cloud-java
37,781
java-talent/proto-google-cloud-talent-v4beta1/src/main/java/com/google/cloud/talent/v4beta1/BatchCreateJobsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4beta1/job_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.talent.v4beta1; /** * * * <pre> * Request to create a batch of jobs. * </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.BatchCreateJobsRequest} */ public final class BatchCreateJobsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4beta1.BatchCreateJobsRequest) BatchCreateJobsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use BatchCreateJobsRequest.newBuilder() to construct. 
private BatchCreateJobsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchCreateJobsRequest() { parent_ = ""; jobs_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchCreateJobsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_BatchCreateJobsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_BatchCreateJobsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.class, com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the tenant under which the job is created. * * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the tenant under which the job is created. 
* * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int JOBS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloud.talent.v4beta1.Job> jobs_; /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.talent.v4beta1.Job> getJobsList() { return jobs_; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.talent.v4beta1.JobOrBuilder> getJobsOrBuilderList() { return jobs_; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public int getJobsCount() { return jobs_.size(); } /** * * * <pre> * Required. The jobs to be created. 
* </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.talent.v4beta1.Job getJobs(int index) { return jobs_.get(index); } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.talent.v4beta1.JobOrBuilder getJobsOrBuilder(int index) { return jobs_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } for (int i = 0; i < jobs_.size(); i++) { output.writeMessage(2, jobs_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } for (int i = 0; i < jobs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, jobs_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.talent.v4beta1.BatchCreateJobsRequest)) { return super.equals(obj); } com.google.cloud.talent.v4beta1.BatchCreateJobsRequest other = (com.google.cloud.talent.v4beta1.BatchCreateJobsRequest) 
obj; if (!getParent().equals(other.getParent())) return false; if (!getJobsList().equals(other.getJobsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (getJobsCount() > 0) { hash = (37 * hash) + JOBS_FIELD_NUMBER; hash = (53 * hash) + getJobsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.talent.v4beta1.BatchCreateJobsRequest prototype) 
{ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to create a batch of jobs. * </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.BatchCreateJobsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4beta1.BatchCreateJobsRequest) com.google.cloud.talent.v4beta1.BatchCreateJobsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_BatchCreateJobsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_BatchCreateJobsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.class, com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.Builder.class); } // Construct using com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; if (jobsBuilder_ == null) { jobs_ = java.util.Collections.emptyList(); } else { jobs_ = null; jobsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { 
return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_BatchCreateJobsRequest_descriptor; } @java.lang.Override public com.google.cloud.talent.v4beta1.BatchCreateJobsRequest getDefaultInstanceForType() { return com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.talent.v4beta1.BatchCreateJobsRequest build() { com.google.cloud.talent.v4beta1.BatchCreateJobsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.talent.v4beta1.BatchCreateJobsRequest buildPartial() { com.google.cloud.talent.v4beta1.BatchCreateJobsRequest result = new com.google.cloud.talent.v4beta1.BatchCreateJobsRequest(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.talent.v4beta1.BatchCreateJobsRequest result) { if (jobsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { jobs_ = java.util.Collections.unmodifiableList(jobs_); bitField0_ = (bitField0_ & ~0x00000002); } result.jobs_ = jobs_; } else { result.jobs_ = jobsBuilder_.build(); } } private void buildPartial0(com.google.cloud.talent.v4beta1.BatchCreateJobsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.talent.v4beta1.BatchCreateJobsRequest) { return mergeFrom((com.google.cloud.talent.v4beta1.BatchCreateJobsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.talent.v4beta1.BatchCreateJobsRequest other) { if (other == com.google.cloud.talent.v4beta1.BatchCreateJobsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (jobsBuilder_ == null) { if (!other.jobs_.isEmpty()) { if (jobs_.isEmpty()) { jobs_ = other.jobs_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureJobsIsMutable(); jobs_.addAll(other.jobs_); } onChanged(); } } else { if (!other.jobs_.isEmpty()) { if (jobsBuilder_.isEmpty()) { jobsBuilder_.dispose(); jobsBuilder_ = null; jobs_ = other.jobs_; bitField0_ = (bitField0_ & ~0x00000002); jobsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getJobsFieldBuilder() : null; } else { jobsBuilder_.addAllMessages(other.jobs_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.cloud.talent.v4beta1.Job m = input.readMessage( com.google.cloud.talent.v4beta1.Job.parser(), extensionRegistry); if (jobsBuilder_ == null) { ensureJobsIsMutable(); jobs_.add(m); } else { jobsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the tenant under which the job is created. * * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the tenant under which the job is created. * * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the tenant under which the job is created. * * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the tenant under which the job is created. * * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. 
For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the tenant under which the job is created. * * The format is "projects/{project_id}/tenants/{tenant_id}". For example, * "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant * is created. For example, "projects/foo". * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.cloud.talent.v4beta1.Job> jobs_ = java.util.Collections.emptyList(); private void ensureJobsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { jobs_ = new java.util.ArrayList<com.google.cloud.talent.v4beta1.Job>(jobs_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.talent.v4beta1.Job, com.google.cloud.talent.v4beta1.Job.Builder, com.google.cloud.talent.v4beta1.JobOrBuilder> jobsBuilder_; /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.talent.v4beta1.Job> getJobsList() { if (jobsBuilder_ == null) { return java.util.Collections.unmodifiableList(jobs_); } else { return jobsBuilder_.getMessageList(); } } /** * * * <pre> * Required. 
The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getJobsCount() { if (jobsBuilder_ == null) { return jobs_.size(); } else { return jobsBuilder_.getCount(); } } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.Job getJobs(int index) { if (jobsBuilder_ == null) { return jobs_.get(index); } else { return jobsBuilder_.getMessage(index); } } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setJobs(int index, com.google.cloud.talent.v4beta1.Job value) { if (jobsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureJobsIsMutable(); jobs_.set(index, value); onChanged(); } else { jobsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setJobs(int index, com.google.cloud.talent.v4beta1.Job.Builder builderForValue) { if (jobsBuilder_ == null) { ensureJobsIsMutable(); jobs_.set(index, builderForValue.build()); onChanged(); } else { jobsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The jobs to be created. 
* </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addJobs(com.google.cloud.talent.v4beta1.Job value) { if (jobsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureJobsIsMutable(); jobs_.add(value); onChanged(); } else { jobsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addJobs(int index, com.google.cloud.talent.v4beta1.Job value) { if (jobsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureJobsIsMutable(); jobs_.add(index, value); onChanged(); } else { jobsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addJobs(com.google.cloud.talent.v4beta1.Job.Builder builderForValue) { if (jobsBuilder_ == null) { ensureJobsIsMutable(); jobs_.add(builderForValue.build()); onChanged(); } else { jobsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addJobs(int index, com.google.cloud.talent.v4beta1.Job.Builder builderForValue) { if (jobsBuilder_ == null) { ensureJobsIsMutable(); jobs_.add(index, builderForValue.build()); onChanged(); } else { jobsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. The jobs to be created. 
* </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllJobs( java.lang.Iterable<? extends com.google.cloud.talent.v4beta1.Job> values) { if (jobsBuilder_ == null) { ensureJobsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, jobs_); onChanged(); } else { jobsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearJobs() { if (jobsBuilder_ == null) { jobs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { jobsBuilder_.clear(); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removeJobs(int index) { if (jobsBuilder_ == null) { ensureJobsIsMutable(); jobs_.remove(index); onChanged(); } else { jobsBuilder_.remove(index); } return this; } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.Job.Builder getJobsBuilder(int index) { return getJobsFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.JobOrBuilder getJobsOrBuilder(int index) { if (jobsBuilder_ == null) { return jobs_.get(index); } else { return jobsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. The jobs to be created. 
* </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? extends com.google.cloud.talent.v4beta1.JobOrBuilder> getJobsOrBuilderList() { if (jobsBuilder_ != null) { return jobsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(jobs_); } } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.Job.Builder addJobsBuilder() { return getJobsFieldBuilder() .addBuilder(com.google.cloud.talent.v4beta1.Job.getDefaultInstance()); } /** * * * <pre> * Required. The jobs to be created. * </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.Job.Builder addJobsBuilder(int index) { return getJobsFieldBuilder() .addBuilder(index, com.google.cloud.talent.v4beta1.Job.getDefaultInstance()); } /** * * * <pre> * Required. The jobs to be created. 
* </pre> * * <code> * repeated .google.cloud.talent.v4beta1.Job jobs = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.talent.v4beta1.Job.Builder> getJobsBuilderList() { return getJobsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.talent.v4beta1.Job, com.google.cloud.talent.v4beta1.Job.Builder, com.google.cloud.talent.v4beta1.JobOrBuilder> getJobsFieldBuilder() { if (jobsBuilder_ == null) { jobsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.talent.v4beta1.Job, com.google.cloud.talent.v4beta1.Job.Builder, com.google.cloud.talent.v4beta1.JobOrBuilder>( jobs_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); jobs_ = null; } return jobsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4beta1.BatchCreateJobsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.BatchCreateJobsRequest) private static final com.google.cloud.talent.v4beta1.BatchCreateJobsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.talent.v4beta1.BatchCreateJobsRequest(); } public static com.google.cloud.talent.v4beta1.BatchCreateJobsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchCreateJobsRequest> PARSER = new com.google.protobuf.AbstractParser<BatchCreateJobsRequest>() { @java.lang.Override public BatchCreateJobsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchCreateJobsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchCreateJobsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.talent.v4beta1.BatchCreateJobsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): the three lines below are dataset-metadata rows (repository id, file size,
// file path) that were accidentally spliced into this file during extraction; they are not
// Java source and mark the seam between two unrelated protoc-generated files
// (BatchCreateJobsRequest above, ListSavedQueriesResponse below). Preserved verbatim as
// comments pending cleanup — the two generated files should be split back apart and
// regenerated from their .proto definitions rather than edited by hand:
// googleapis/google-cloud-java
// 37,883
// java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListSavedQueriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/dataset_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [DatasetService.ListSavedQueries][google.cloud.aiplatform.v1beta1.DatasetService.ListSavedQueries]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse} */ public final class ListSavedQueriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) ListSavedQueriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSavedQueriesResponse.newBuilder() to construct. 
private ListSavedQueriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSavedQueriesResponse() { savedQueries_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSavedQueriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListSavedQueriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListSavedQueriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.class, com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.Builder.class); } public static final int SAVED_QUERIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1beta1.SavedQuery> savedQueries_; /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1beta1.SavedQuery> getSavedQueriesList() { return savedQueries_; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ @java.lang.Override public java.util.List<? 
extends com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder> getSavedQueriesOrBuilderList() { return savedQueries_; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ @java.lang.Override public int getSavedQueriesCount() { return savedQueries_.size(); } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.SavedQuery getSavedQueries(int index) { return savedQueries_.get(index); } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder getSavedQueriesOrBuilder( int index) { return savedQueries_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < savedQueries_.size(); i++) { output.writeMessage(1, savedQueries_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < savedQueries_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, savedQueries_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse other = (com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) obj; if (!getSavedQueriesList().equals(other.getSavedQueriesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSavedQueriesCount() > 0) { hash = (37 * hash) + SAVED_QUERIES_FIELD_NUMBER; hash = (53 * hash) + getSavedQueriesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [DatasetService.ListSavedQueries][google.cloud.aiplatform.v1beta1.DatasetService.ListSavedQueries]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListSavedQueriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListSavedQueriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.class, com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (savedQueriesBuilder_ == null) { savedQueries_ = 
java.util.Collections.emptyList(); } else { savedQueries_ = null; savedQueriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.DatasetServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListSavedQueriesResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse build() { com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse result = new com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse result) { if (savedQueriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { savedQueries_ = java.util.Collections.unmodifiableList(savedQueries_); bitField0_ = (bitField0_ & ~0x00000001); } result.savedQueries_ = savedQueries_; } else { result.savedQueries_ = savedQueriesBuilder_.build(); } } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public 
Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse.getDefaultInstance()) return this; if (savedQueriesBuilder_ == null) { if (!other.savedQueries_.isEmpty()) { if (savedQueries_.isEmpty()) { savedQueries_ = other.savedQueries_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSavedQueriesIsMutable(); savedQueries_.addAll(other.savedQueries_); } onChanged(); } } else { if (!other.savedQueries_.isEmpty()) { if (savedQueriesBuilder_.isEmpty()) { savedQueriesBuilder_.dispose(); savedQueriesBuilder_ = null; savedQueries_ = other.savedQueries_; bitField0_ = (bitField0_ & ~0x00000001); savedQueriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSavedQueriesFieldBuilder() : null; } else { savedQueriesBuilder_.addAllMessages(other.savedQueries_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.SavedQuery m = input.readMessage( com.google.cloud.aiplatform.v1beta1.SavedQuery.parser(), extensionRegistry); if (savedQueriesBuilder_ == null) { ensureSavedQueriesIsMutable(); savedQueries_.add(m); } else { savedQueriesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1beta1.SavedQuery> savedQueries_ = java.util.Collections.emptyList(); private void ensureSavedQueriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { savedQueries_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.SavedQuery>(savedQueries_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.SavedQuery, 
com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder, com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder> savedQueriesBuilder_; /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.SavedQuery> getSavedQueriesList() { if (savedQueriesBuilder_ == null) { return java.util.Collections.unmodifiableList(savedQueries_); } else { return savedQueriesBuilder_.getMessageList(); } } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public int getSavedQueriesCount() { if (savedQueriesBuilder_ == null) { return savedQueries_.size(); } else { return savedQueriesBuilder_.getCount(); } } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.SavedQuery getSavedQueries(int index) { if (savedQueriesBuilder_ == null) { return savedQueries_.get(index); } else { return savedQueriesBuilder_.getMessage(index); } } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder setSavedQueries( int index, com.google.cloud.aiplatform.v1beta1.SavedQuery value) { if (savedQueriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSavedQueriesIsMutable(); savedQueries_.set(index, value); onChanged(); } else { savedQueriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder setSavedQueries( int index, com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder builderForValue) { if (savedQueriesBuilder_ == null) { ensureSavedQueriesIsMutable(); savedQueries_.set(index, builderForValue.build()); onChanged(); } else { savedQueriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder addSavedQueries(com.google.cloud.aiplatform.v1beta1.SavedQuery value) { if (savedQueriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSavedQueriesIsMutable(); savedQueries_.add(value); onChanged(); } else { savedQueriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder addSavedQueries( int index, com.google.cloud.aiplatform.v1beta1.SavedQuery value) { if (savedQueriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSavedQueriesIsMutable(); savedQueries_.add(index, value); onChanged(); } else { savedQueriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder addSavedQueries( com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder builderForValue) { if (savedQueriesBuilder_ == null) { ensureSavedQueriesIsMutable(); savedQueries_.add(builderForValue.build()); onChanged(); } else { savedQueriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder addSavedQueries( int index, com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder builderForValue) { if (savedQueriesBuilder_ == null) { ensureSavedQueriesIsMutable(); savedQueries_.add(index, builderForValue.build()); onChanged(); } else { savedQueriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder addAllSavedQueries( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.SavedQuery> values) { if (savedQueriesBuilder_ == null) { ensureSavedQueriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, savedQueries_); onChanged(); } else { savedQueriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder clearSavedQueries() { if (savedQueriesBuilder_ == null) { savedQueries_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { savedQueriesBuilder_.clear(); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public Builder removeSavedQueries(int index) { if (savedQueriesBuilder_ == null) { ensureSavedQueriesIsMutable(); savedQueries_.remove(index); onChanged(); } else { savedQueriesBuilder_.remove(index); } return this; } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder getSavedQueriesBuilder( int index) { return getSavedQueriesFieldBuilder().getBuilder(index); } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder getSavedQueriesOrBuilder( int index) { if (savedQueriesBuilder_ == null) { return savedQueries_.get(index); } else { return savedQueriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder> getSavedQueriesOrBuilderList() { if (savedQueriesBuilder_ != null) { return savedQueriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(savedQueries_); } } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder addSavedQueriesBuilder() { return getSavedQueriesFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.SavedQuery.getDefaultInstance()); } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder addSavedQueriesBuilder( int index) { return getSavedQueriesFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.SavedQuery.getDefaultInstance()); } /** * * * <pre> * A list of SavedQueries that match the specified filter in the request. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.SavedQuery saved_queries = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder> getSavedQueriesBuilderList() { return getSavedQueriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.SavedQuery, com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder, com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder> getSavedQueriesFieldBuilder() { if (savedQueriesBuilder_ == null) { savedQueriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.SavedQuery, com.google.cloud.aiplatform.v1beta1.SavedQuery.Builder, com.google.cloud.aiplatform.v1beta1.SavedQueryOrBuilder>( savedQueries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); savedQueries_ = null; } return savedQueriesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The standard List next-page token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse) private static final com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse(); } public static com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSavedQueriesResponse> PARSER = new com.google.protobuf.AbstractParser<ListSavedQueriesResponse>() { @java.lang.Override public ListSavedQueriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListSavedQueriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSavedQueriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListSavedQueriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ---------------------------------------------------------------------------
// NOTE(review): corpus-extraction artifact, not source code. The three rows
// below are dataset metadata identifying the SECOND generated file that was
// concatenated after ListSavedQueriesResponse. Preserved here as a comment so
// the text remains valid Java; the real file boundary is between the closing
// brace above and the license header below.
//   repo: googleapis/google-cloud-java
//   size: 37,872
//   path: java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/CreateBackupPolicyRequest.java
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/netapp/v1/backup_policy.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.netapp.v1; /** * * * <pre> * CreateBackupPolicyRequest creates a backupPolicy. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.CreateBackupPolicyRequest} */ public final class CreateBackupPolicyRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.CreateBackupPolicyRequest) CreateBackupPolicyRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateBackupPolicyRequest.newBuilder() to construct. 
private CreateBackupPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateBackupPolicyRequest() { parent_ = ""; backupPolicyId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateBackupPolicyRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.BackupPolicyProto .internal_static_google_cloud_netapp_v1_CreateBackupPolicyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.BackupPolicyProto .internal_static_google_cloud_netapp_v1_CreateBackupPolicyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.CreateBackupPolicyRequest.class, com.google.cloud.netapp.v1.CreateBackupPolicyRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int BACKUP_POLICY_FIELD_NUMBER = 2; private com.google.cloud.netapp.v1.BackupPolicy backupPolicy_; /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backupPolicy field is set. */ @java.lang.Override public boolean hasBackupPolicy() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backupPolicy. */ @java.lang.Override public com.google.cloud.netapp.v1.BackupPolicy getBackupPolicy() { return backupPolicy_ == null ? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance() : backupPolicy_; } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.netapp.v1.BackupPolicyOrBuilder getBackupPolicyOrBuilder() { return backupPolicy_ == null ? 
com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance() : backupPolicy_; } public static final int BACKUP_POLICY_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object backupPolicyId_ = ""; /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. * </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The backupPolicyId. */ @java.lang.Override public java.lang.String getBackupPolicyId() { java.lang.Object ref = backupPolicyId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); backupPolicyId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. * </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for backupPolicyId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBackupPolicyIdBytes() { java.lang.Object ref = backupPolicyId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); backupPolicyId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getBackupPolicy()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(backupPolicyId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, backupPolicyId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getBackupPolicy()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(backupPolicyId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, backupPolicyId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.netapp.v1.CreateBackupPolicyRequest)) { 
return super.equals(obj); } com.google.cloud.netapp.v1.CreateBackupPolicyRequest other = (com.google.cloud.netapp.v1.CreateBackupPolicyRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasBackupPolicy() != other.hasBackupPolicy()) return false; if (hasBackupPolicy()) { if (!getBackupPolicy().equals(other.getBackupPolicy())) return false; } if (!getBackupPolicyId().equals(other.getBackupPolicyId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasBackupPolicy()) { hash = (37 * hash) + BACKUP_POLICY_FIELD_NUMBER; hash = (53 * hash) + getBackupPolicy().hashCode(); } hash = (37 * hash) + BACKUP_POLICY_ID_FIELD_NUMBER; hash = (53 * hash) + getBackupPolicyId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.netapp.v1.CreateBackupPolicyRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * CreateBackupPolicyRequest creates a backupPolicy. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.CreateBackupPolicyRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.CreateBackupPolicyRequest) com.google.cloud.netapp.v1.CreateBackupPolicyRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.BackupPolicyProto .internal_static_google_cloud_netapp_v1_CreateBackupPolicyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.BackupPolicyProto .internal_static_google_cloud_netapp_v1_CreateBackupPolicyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.CreateBackupPolicyRequest.class, com.google.cloud.netapp.v1.CreateBackupPolicyRequest.Builder.class); } // Construct using com.google.cloud.netapp.v1.CreateBackupPolicyRequest.newBuilder() private Builder() { 
maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getBackupPolicyFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; backupPolicy_ = null; if (backupPolicyBuilder_ != null) { backupPolicyBuilder_.dispose(); backupPolicyBuilder_ = null; } backupPolicyId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.netapp.v1.BackupPolicyProto .internal_static_google_cloud_netapp_v1_CreateBackupPolicyRequest_descriptor; } @java.lang.Override public com.google.cloud.netapp.v1.CreateBackupPolicyRequest getDefaultInstanceForType() { return com.google.cloud.netapp.v1.CreateBackupPolicyRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.netapp.v1.CreateBackupPolicyRequest build() { com.google.cloud.netapp.v1.CreateBackupPolicyRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.netapp.v1.CreateBackupPolicyRequest buildPartial() { com.google.cloud.netapp.v1.CreateBackupPolicyRequest result = new com.google.cloud.netapp.v1.CreateBackupPolicyRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.netapp.v1.CreateBackupPolicyRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.backupPolicy_ = backupPolicyBuilder_ == null ? 
backupPolicy_ : backupPolicyBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.backupPolicyId_ = backupPolicyId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.netapp.v1.CreateBackupPolicyRequest) { return mergeFrom((com.google.cloud.netapp.v1.CreateBackupPolicyRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.netapp.v1.CreateBackupPolicyRequest other) { if (other == com.google.cloud.netapp.v1.CreateBackupPolicyRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasBackupPolicy()) { mergeBackupPolicy(other.getBackupPolicy()); } if (!other.getBackupPolicyId().isEmpty()) { backupPolicyId_ = other.backupPolicyId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getBackupPolicyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { backupPolicyId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The location to create the backup policies of, in the format * `projects/{project_id}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.netapp.v1.BackupPolicy backupPolicy_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.BackupPolicy, com.google.cloud.netapp.v1.BackupPolicy.Builder, com.google.cloud.netapp.v1.BackupPolicyOrBuilder> backupPolicyBuilder_; /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backupPolicy field is set. */ public boolean hasBackupPolicy() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backupPolicy. */ public com.google.cloud.netapp.v1.BackupPolicy getBackupPolicy() { if (backupPolicyBuilder_ == null) { return backupPolicy_ == null ? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance() : backupPolicy_; } else { return backupPolicyBuilder_.getMessage(); } } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBackupPolicy(com.google.cloud.netapp.v1.BackupPolicy value) { if (backupPolicyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } backupPolicy_ = value; } else { backupPolicyBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. 
A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBackupPolicy( com.google.cloud.netapp.v1.BackupPolicy.Builder builderForValue) { if (backupPolicyBuilder_ == null) { backupPolicy_ = builderForValue.build(); } else { backupPolicyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeBackupPolicy(com.google.cloud.netapp.v1.BackupPolicy value) { if (backupPolicyBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && backupPolicy_ != null && backupPolicy_ != com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance()) { getBackupPolicyBuilder().mergeFrom(value); } else { backupPolicy_ = value; } } else { backupPolicyBuilder_.mergeFrom(value); } if (backupPolicy_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearBackupPolicy() { bitField0_ = (bitField0_ & ~0x00000002); backupPolicy_ = null; if (backupPolicyBuilder_ != null) { backupPolicyBuilder_.dispose(); backupPolicyBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.netapp.v1.BackupPolicy.Builder getBackupPolicyBuilder() { bitField0_ |= 0x00000002; onChanged(); return getBackupPolicyFieldBuilder().getBuilder(); } /** * * * <pre> * Required. 
A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.netapp.v1.BackupPolicyOrBuilder getBackupPolicyOrBuilder() { if (backupPolicyBuilder_ != null) { return backupPolicyBuilder_.getMessageOrBuilder(); } else { return backupPolicy_ == null ? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance() : backupPolicy_; } } /** * * * <pre> * Required. A backupPolicy resource * </pre> * * <code> * .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.BackupPolicy, com.google.cloud.netapp.v1.BackupPolicy.Builder, com.google.cloud.netapp.v1.BackupPolicyOrBuilder> getBackupPolicyFieldBuilder() { if (backupPolicyBuilder_ == null) { backupPolicyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.BackupPolicy, com.google.cloud.netapp.v1.BackupPolicy.Builder, com.google.cloud.netapp.v1.BackupPolicyOrBuilder>( getBackupPolicy(), getParentForChildren(), isClean()); backupPolicy_ = null; } return backupPolicyBuilder_; } private java.lang.Object backupPolicyId_ = ""; /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. * </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The backupPolicyId. 
*/ public java.lang.String getBackupPolicyId() { java.lang.Object ref = backupPolicyId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); backupPolicyId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. * </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for backupPolicyId. */ public com.google.protobuf.ByteString getBackupPolicyIdBytes() { java.lang.Object ref = backupPolicyId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); backupPolicyId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. * </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The backupPolicyId to set. * @return This builder for chaining. */ public Builder setBackupPolicyId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } backupPolicyId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. 
* </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearBackupPolicyId() { backupPolicyId_ = getDefaultInstance().getBackupPolicyId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the backup policy. * The ID must be unique within the specified location. * Must contain only letters, numbers and hyphen, with the first * character a letter, the last a letter or a * number, and a 63 character maximum. * </pre> * * <code>string backup_policy_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for backupPolicyId to set. * @return This builder for chaining. */ public Builder setBackupPolicyIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); backupPolicyId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.CreateBackupPolicyRequest) } // @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.CreateBackupPolicyRequest) private static final com.google.cloud.netapp.v1.CreateBackupPolicyRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.CreateBackupPolicyRequest(); } public static com.google.cloud.netapp.v1.CreateBackupPolicyRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateBackupPolicyRequest> PARSER = new com.google.protobuf.AbstractParser<CreateBackupPolicyRequest>() { 
@java.lang.Override public CreateBackupPolicyRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateBackupPolicyRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateBackupPolicyRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.netapp.v1.CreateBackupPolicyRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,883
java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/CreateWorkflowConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataform/v1/dataform.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataform.v1; /** * * * <pre> * `CreateWorkflowConfig` request message. * </pre> * * Protobuf type {@code google.cloud.dataform.v1.CreateWorkflowConfigRequest} */ public final class CreateWorkflowConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.CreateWorkflowConfigRequest) CreateWorkflowConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateWorkflowConfigRequest.newBuilder() to construct. 
private CreateWorkflowConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateWorkflowConfigRequest() { parent_ = ""; workflowConfigId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateWorkflowConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateWorkflowConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateWorkflowConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.class, com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int WORKFLOW_CONFIG_FIELD_NUMBER = 2; private com.google.cloud.dataform.v1.WorkflowConfig workflowConfig_; /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workflowConfig field is set. */ @java.lang.Override public boolean hasWorkflowConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workflowConfig. */ @java.lang.Override public com.google.cloud.dataform.v1.WorkflowConfig getWorkflowConfig() { return workflowConfig_ == null ? com.google.cloud.dataform.v1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataform.v1.WorkflowConfigOrBuilder getWorkflowConfigOrBuilder() { return workflowConfig_ == null ? 
com.google.cloud.dataform.v1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } public static final int WORKFLOW_CONFIG_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object workflowConfigId_ = ""; /** * * * <pre> * Required. The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The workflowConfigId. */ @java.lang.Override public java.lang.String getWorkflowConfigId() { java.lang.Object ref = workflowConfigId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); workflowConfigId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for workflowConfigId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getWorkflowConfigIdBytes() { java.lang.Object ref = workflowConfigId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); workflowConfigId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getWorkflowConfig()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workflowConfigId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, workflowConfigId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWorkflowConfig()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workflowConfigId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, workflowConfigId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.dataform.v1.CreateWorkflowConfigRequest)) { return super.equals(obj); } com.google.cloud.dataform.v1.CreateWorkflowConfigRequest other = (com.google.cloud.dataform.v1.CreateWorkflowConfigRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasWorkflowConfig() != other.hasWorkflowConfig()) return false; if (hasWorkflowConfig()) { if (!getWorkflowConfig().equals(other.getWorkflowConfig())) return false; } if (!getWorkflowConfigId().equals(other.getWorkflowConfigId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasWorkflowConfig()) { hash = (37 * hash) + WORKFLOW_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getWorkflowConfig().hashCode(); } hash = (37 * hash) + WORKFLOW_CONFIG_ID_FIELD_NUMBER; hash = (53 * hash) + getWorkflowConfigId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } 
public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dataform.v1.CreateWorkflowConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * `CreateWorkflowConfig` request message. 
* </pre> * * Protobuf type {@code google.cloud.dataform.v1.CreateWorkflowConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.CreateWorkflowConfigRequest) com.google.cloud.dataform.v1.CreateWorkflowConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateWorkflowConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateWorkflowConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.class, com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.Builder.class); } // Construct using com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getWorkflowConfigFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; workflowConfig_ = null; if (workflowConfigBuilder_ != null) { workflowConfigBuilder_.dispose(); workflowConfigBuilder_ = null; } workflowConfigId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateWorkflowConfigRequest_descriptor; } @java.lang.Override public 
com.google.cloud.dataform.v1.CreateWorkflowConfigRequest getDefaultInstanceForType() { return com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataform.v1.CreateWorkflowConfigRequest build() { com.google.cloud.dataform.v1.CreateWorkflowConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataform.v1.CreateWorkflowConfigRequest buildPartial() { com.google.cloud.dataform.v1.CreateWorkflowConfigRequest result = new com.google.cloud.dataform.v1.CreateWorkflowConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataform.v1.CreateWorkflowConfigRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.workflowConfig_ = workflowConfigBuilder_ == null ? 
workflowConfig_ : workflowConfigBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.workflowConfigId_ = workflowConfigId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataform.v1.CreateWorkflowConfigRequest) { return mergeFrom((com.google.cloud.dataform.v1.CreateWorkflowConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataform.v1.CreateWorkflowConfigRequest other) { if (other == com.google.cloud.dataform.v1.CreateWorkflowConfigRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasWorkflowConfig()) { mergeWorkflowConfig(other.getWorkflowConfig()); } if (!other.getWorkflowConfigId().isEmpty()) { workflowConfigId_ = other.workflowConfigId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } 
@java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getWorkflowConfigFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { workflowConfigId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The repository in which to create the workflow config. Must be in * the format `projects/&#42;&#47;locations/&#42;&#47;repositories/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.dataform.v1.WorkflowConfig workflowConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1.WorkflowConfig, com.google.cloud.dataform.v1.WorkflowConfig.Builder, com.google.cloud.dataform.v1.WorkflowConfigOrBuilder> workflowConfigBuilder_; /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the workflowConfig field is set. */ public boolean hasWorkflowConfig() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The workflowConfig. */ public com.google.cloud.dataform.v1.WorkflowConfig getWorkflowConfig() { if (workflowConfigBuilder_ == null) { return workflowConfig_ == null ? com.google.cloud.dataform.v1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } else { return workflowConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkflowConfig(com.google.cloud.dataform.v1.WorkflowConfig value) { if (workflowConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } workflowConfig_ = value; } else { workflowConfigBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The workflow config to create. 
* </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setWorkflowConfig( com.google.cloud.dataform.v1.WorkflowConfig.Builder builderForValue) { if (workflowConfigBuilder_ == null) { workflowConfig_ = builderForValue.build(); } else { workflowConfigBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeWorkflowConfig(com.google.cloud.dataform.v1.WorkflowConfig value) { if (workflowConfigBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && workflowConfig_ != null && workflowConfig_ != com.google.cloud.dataform.v1.WorkflowConfig.getDefaultInstance()) { getWorkflowConfigBuilder().mergeFrom(value); } else { workflowConfig_ = value; } } else { workflowConfigBuilder_.mergeFrom(value); } if (workflowConfig_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearWorkflowConfig() { bitField0_ = (bitField0_ & ~0x00000002); workflowConfig_ = null; if (workflowConfigBuilder_ != null) { workflowConfigBuilder_.dispose(); workflowConfigBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataform.v1.WorkflowConfig.Builder getWorkflowConfigBuilder() { bitField0_ |= 0x00000002; onChanged(); return getWorkflowConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. 
The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataform.v1.WorkflowConfigOrBuilder getWorkflowConfigOrBuilder() { if (workflowConfigBuilder_ != null) { return workflowConfigBuilder_.getMessageOrBuilder(); } else { return workflowConfig_ == null ? com.google.cloud.dataform.v1.WorkflowConfig.getDefaultInstance() : workflowConfig_; } } /** * * * <pre> * Required. The workflow config to create. * </pre> * * <code> * .google.cloud.dataform.v1.WorkflowConfig workflow_config = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1.WorkflowConfig, com.google.cloud.dataform.v1.WorkflowConfig.Builder, com.google.cloud.dataform.v1.WorkflowConfigOrBuilder> getWorkflowConfigFieldBuilder() { if (workflowConfigBuilder_ == null) { workflowConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1.WorkflowConfig, com.google.cloud.dataform.v1.WorkflowConfig.Builder, com.google.cloud.dataform.v1.WorkflowConfigOrBuilder>( getWorkflowConfig(), getParentForChildren(), isClean()); workflowConfig_ = null; } return workflowConfigBuilder_; } private java.lang.Object workflowConfigId_ = ""; /** * * * <pre> * Required. The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The workflowConfigId. */ public java.lang.String getWorkflowConfigId() { java.lang.Object ref = workflowConfigId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); workflowConfigId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. 
The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for workflowConfigId. */ public com.google.protobuf.ByteString getWorkflowConfigIdBytes() { java.lang.Object ref = workflowConfigId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); workflowConfigId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The workflowConfigId to set. * @return This builder for chaining. */ public Builder setWorkflowConfigId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } workflowConfigId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearWorkflowConfigId() { workflowConfigId_ = getDefaultInstance().getWorkflowConfigId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the workflow config, which will become the * final component of the workflow config's resource name. * </pre> * * <code>string workflow_config_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for workflowConfigId to set. * @return This builder for chaining. 
*/ public Builder setWorkflowConfigIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); workflowConfigId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.CreateWorkflowConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.CreateWorkflowConfigRequest) private static final com.google.cloud.dataform.v1.CreateWorkflowConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.CreateWorkflowConfigRequest(); } public static com.google.cloud.dataform.v1.CreateWorkflowConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateWorkflowConfigRequest> PARSER = new com.google.protobuf.AbstractParser<CreateWorkflowConfigRequest>() { @java.lang.Override public CreateWorkflowConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateWorkflowConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateWorkflowConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataform.v1.CreateWorkflowConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop-common
37,816
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/shortcircuit/ShortCircuitCache.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.shortcircuit; import java.io.BufferedOutputStream; import java.io.Closeable; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.nio.MappedByteBuffer; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.TreeMap; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.mutable.MutableBoolean; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.ExtendedBlockId; import org.apache.hadoop.hdfs.net.DomainPeer; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.datatransfer.Sender; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReleaseShortCircuitAccessResponseProto; import 
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status; import org.apache.hadoop.hdfs.protocolPB.PBHelper; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.Slot; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.net.unix.DomainSocket; import org.apache.hadoop.net.unix.DomainSocketWatcher; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Waitable; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; /** * The ShortCircuitCache tracks things which the client needs to access * HDFS block files via short-circuit. * * These things include: memory-mapped regions, file descriptors, and shared * memory areas for communicating with the DataNode. */ @InterfaceAudience.Private public class ShortCircuitCache implements Closeable { public static final Log LOG = LogFactory.getLog(ShortCircuitCache.class); /** * Expiry thread which makes sure that the file descriptors get closed * after a while. */ private class CacheCleaner implements Runnable, Closeable { private ScheduledFuture<?> future; /** * Run the CacheCleaner thread. * * Whenever a thread requests a ShortCircuitReplica object, we will make * sure it gets one. That ShortCircuitReplica object can then be re-used * when another thread requests a ShortCircuitReplica object for the same * block. So in that sense, there is no maximum size to the cache. * * However, when a ShortCircuitReplica object is unreferenced by the * thread(s) that are using it, it becomes evictable. There are two * separate eviction lists-- one for mmaped objects, and another for * non-mmaped objects. We do this in order to avoid having the regular * files kick the mmaped files out of the cache too quickly. 
Reusing * an already-existing mmap gives a huge performance boost, since the * page table entries don't have to be re-populated. Both the mmap * and non-mmap evictable lists have maximum sizes and maximum lifespans. */ @Override public void run() { ShortCircuitCache.this.lock.lock(); try { if (ShortCircuitCache.this.closed) return; long curMs = Time.monotonicNow(); if (LOG.isDebugEnabled()) { LOG.debug(this + ": cache cleaner running at " + curMs); } int numDemoted = demoteOldEvictableMmaped(curMs); int numPurged = 0; Long evictionTimeNs = Long.valueOf(0); while (true) { Entry<Long, ShortCircuitReplica> entry = evictable.ceilingEntry(evictionTimeNs); if (entry == null) break; evictionTimeNs = entry.getKey(); long evictionTimeMs = TimeUnit.MILLISECONDS.convert(evictionTimeNs, TimeUnit.NANOSECONDS); if (evictionTimeMs + maxNonMmappedEvictableLifespanMs >= curMs) break; ShortCircuitReplica replica = entry.getValue(); if (LOG.isTraceEnabled()) { LOG.trace("CacheCleaner: purging " + replica + ": " + StringUtils.getStackTrace(Thread.currentThread())); } purge(replica); numPurged++; } if (LOG.isDebugEnabled()) { LOG.debug(this + ": finishing cache cleaner run started at " + curMs + ". Demoted " + numDemoted + " mmapped replicas; " + "purged " + numPurged + " replicas."); } } finally { ShortCircuitCache.this.lock.unlock(); } } @Override public void close() throws IOException { if (future != null) { future.cancel(false); } } public void setFuture(ScheduledFuture<?> future) { this.future = future; } /** * Get the rate at which this cleaner thread should be scheduled. * * We do this by taking the minimum expiration time and dividing by 4. * * @return the rate in milliseconds at which this thread should be * scheduled. */ public long getRateInMs() { long minLifespanMs = Math.min(maxNonMmappedEvictableLifespanMs, maxEvictableMmapedLifespanMs); long sampleTimeMs = minLifespanMs / 4; return (sampleTimeMs < 1) ? 
1 : sampleTimeMs; } } /** * A task which asks the DataNode to release a short-circuit shared memory * slot. If successful, this will tell the DataNode to stop monitoring * changes to the mlock status of the replica associated with the slot. * It will also allow us (the client) to re-use this slot for another * replica. If we can't communicate with the DataNode for some reason, * we tear down the shared memory segment to avoid being in an inconsistent * state. */ private class SlotReleaser implements Runnable { /** * The slot that we need to release. */ private final Slot slot; SlotReleaser(Slot slot) { this.slot = slot; } @Override public void run() { if (LOG.isTraceEnabled()) { LOG.trace(ShortCircuitCache.this + ": about to release " + slot); } final DfsClientShm shm = (DfsClientShm)slot.getShm(); final DomainSocket shmSock = shm.getPeer().getDomainSocket(); DomainSocket sock = null; DataOutputStream out = null; final String path = shmSock.getPath(); boolean success = false; try { sock = DomainSocket.connect(path); out = new DataOutputStream( new BufferedOutputStream(sock.getOutputStream())); new Sender(out).releaseShortCircuitFds(slot.getSlotId()); DataInputStream in = new DataInputStream(sock.getInputStream()); ReleaseShortCircuitAccessResponseProto resp = ReleaseShortCircuitAccessResponseProto.parseFrom( PBHelper.vintPrefixed(in)); if (resp.getStatus() != Status.SUCCESS) { String error = resp.hasError() ? resp.getError() : "(unknown)"; throw new IOException(resp.getStatus().toString() + ": " + error); } if (LOG.isTraceEnabled()) { LOG.trace(ShortCircuitCache.this + ": released " + slot); } success = true; } catch (IOException e) { LOG.error(ShortCircuitCache.this + ": failed to release " + "short-circuit shared memory slot " + slot + " by sending " + "ReleaseShortCircuitAccessRequestProto to " + path + ". 
Closing shared memory segment.", e); } finally { if (success) { shmManager.freeSlot(slot); } else { shm.getEndpointShmManager().shutdown(shm); } IOUtils.cleanup(LOG, sock, out); } } } public interface ShortCircuitReplicaCreator { /** * Attempt to create a ShortCircuitReplica object. * * This callback will be made without holding any locks. * * @return a non-null ShortCircuitReplicaInfo object. */ ShortCircuitReplicaInfo createShortCircuitReplicaInfo(); } /** * Lock protecting the cache. */ private final ReentrantLock lock = new ReentrantLock(); /** * The executor service that runs the cacheCleaner. */ private final ScheduledThreadPoolExecutor cleanerExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder(). setDaemon(true).setNameFormat("ShortCircuitCache_Cleaner"). build()); /** * The executor service that runs the cacheCleaner. */ private final ScheduledThreadPoolExecutor releaserExecutor = new ScheduledThreadPoolExecutor(1, new ThreadFactoryBuilder(). setDaemon(true).setNameFormat("ShortCircuitCache_SlotReleaser"). build()); /** * A map containing all ShortCircuitReplicaInfo objects, organized by Key. * ShortCircuitReplicaInfo objects may contain a replica, or an InvalidToken * exception. */ private final HashMap<ExtendedBlockId, Waitable<ShortCircuitReplicaInfo>> replicaInfoMap = new HashMap<ExtendedBlockId, Waitable<ShortCircuitReplicaInfo>>(); /** * The CacheCleaner. We don't create this and schedule it until it becomes * necessary. */ private CacheCleaner cacheCleaner; /** * Tree of evictable elements. * * Maps (unique) insertion time in nanoseconds to the element. */ private final TreeMap<Long, ShortCircuitReplica> evictable = new TreeMap<Long, ShortCircuitReplica>(); /** * Maximum total size of the cache, including both mmapped and * no$-mmapped elements. */ private final int maxTotalSize; /** * Non-mmaped elements older than this will be closed. */ private long maxNonMmappedEvictableLifespanMs; /** * Tree of mmaped evictable elements. 
* * Maps (unique) insertion time in nanoseconds to the element. */ private final TreeMap<Long, ShortCircuitReplica> evictableMmapped = new TreeMap<Long, ShortCircuitReplica>(); /** * Maximum number of mmaped evictable elements. */ private int maxEvictableMmapedSize; /** * Mmaped elements older than this will be closed. */ private final long maxEvictableMmapedLifespanMs; /** * The minimum number of milliseconds we'll wait after an unsuccessful * mmap attempt before trying again. */ private final long mmapRetryTimeoutMs; /** * How long we will keep replicas in the cache before declaring them * to be stale. */ private final long staleThresholdMs; /** * True if the ShortCircuitCache is closed. */ private boolean closed = false; /** * Number of existing mmaps associated with this cache. */ private int outstandingMmapCount = 0; /** * Manages short-circuit shared memory segments for the client. */ private final DfsClientShmManager shmManager; /** * Create a {@link ShortCircuitCache} object from a {@link Configuration} */ public static ShortCircuitCache fromConf(Configuration conf) { return new ShortCircuitCache( conf.getInt(DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_SIZE_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_SIZE_DEFAULT), conf.getLong(DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_EXPIRY_MS_KEY, DFSConfigKeys.DFS_CLIENT_READ_SHORTCIRCUIT_STREAMS_CACHE_EXPIRY_MS_DEFAULT), conf.getInt(DFSConfigKeys.DFS_CLIENT_MMAP_CACHE_SIZE, DFSConfigKeys.DFS_CLIENT_MMAP_CACHE_SIZE_DEFAULT), conf.getLong(DFSConfigKeys.DFS_CLIENT_MMAP_CACHE_TIMEOUT_MS, DFSConfigKeys.DFS_CLIENT_MMAP_CACHE_TIMEOUT_MS_DEFAULT), conf.getLong(DFSConfigKeys.DFS_CLIENT_MMAP_RETRY_TIMEOUT_MS, DFSConfigKeys.DFS_CLIENT_MMAP_RETRY_TIMEOUT_MS_DEFAULT), conf.getLong(DFSConfigKeys.DFS_CLIENT_SHORT_CIRCUIT_REPLICA_STALE_THRESHOLD_MS, DFSConfigKeys.DFS_CLIENT_SHORT_CIRCUIT_REPLICA_STALE_THRESHOLD_MS_DEFAULT), 
conf.getInt(DFSConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS, DFSConfigKeys.DFS_SHORT_CIRCUIT_SHARED_MEMORY_WATCHER_INTERRUPT_CHECK_MS_DEFAULT)); } public ShortCircuitCache(int maxTotalSize, long maxNonMmappedEvictableLifespanMs, int maxEvictableMmapedSize, long maxEvictableMmapedLifespanMs, long mmapRetryTimeoutMs, long staleThresholdMs, int shmInterruptCheckMs) { Preconditions.checkArgument(maxTotalSize >= 0); this.maxTotalSize = maxTotalSize; Preconditions.checkArgument(maxNonMmappedEvictableLifespanMs >= 0); this.maxNonMmappedEvictableLifespanMs = maxNonMmappedEvictableLifespanMs; Preconditions.checkArgument(maxEvictableMmapedSize >= 0); this.maxEvictableMmapedSize = maxEvictableMmapedSize; Preconditions.checkArgument(maxEvictableMmapedLifespanMs >= 0); this.maxEvictableMmapedLifespanMs = maxEvictableMmapedLifespanMs; this.mmapRetryTimeoutMs = mmapRetryTimeoutMs; this.staleThresholdMs = staleThresholdMs; DfsClientShmManager shmManager = null; if ((shmInterruptCheckMs > 0) && (DomainSocketWatcher.getLoadingFailureReason() == null)) { try { shmManager = new DfsClientShmManager(shmInterruptCheckMs); } catch (IOException e) { LOG.error("failed to create ShortCircuitShmManager", e); } } this.shmManager = shmManager; } public long getStaleThresholdMs() { return staleThresholdMs; } /** * Increment the reference count of a replica, and remove it from any free * list it may be in. * * You must hold the cache lock while calling this function. * * @param replica The replica we're removing. */ private void ref(ShortCircuitReplica replica) { lock.lock(); try { Preconditions.checkArgument(replica.refCount > 0, "can't ref " + replica + " because its refCount reached " + replica.refCount); Long evictableTimeNs = replica.getEvictableTimeNs(); replica.refCount++; if (evictableTimeNs != null) { String removedFrom = removeEvictable(replica); if (LOG.isTraceEnabled()) { LOG.trace(this + ": " + removedFrom + " no longer contains " + replica + ". 
refCount " + (replica.refCount - 1) + " -> " + replica.refCount + StringUtils.getStackTrace(Thread.currentThread())); } } else if (LOG.isTraceEnabled()) { LOG.trace(this + ": replica refCount " + (replica.refCount - 1) + " -> " + replica.refCount + StringUtils.getStackTrace(Thread.currentThread())); } } finally { lock.unlock(); } } /** * Unreference a replica. * * You must hold the cache lock while calling this function. * * @param replica The replica being unreferenced. */ void unref(ShortCircuitReplica replica) { lock.lock(); try { // If the replica is stale or unusable, but we haven't purged it yet, // let's do that. It would be a shame to evict a non-stale replica so // that we could put a stale or unusable one into the cache. if (!replica.purged) { String purgeReason = null; if (!replica.getDataStream().getChannel().isOpen()) { purgeReason = "purging replica because its data channel is closed."; } else if (!replica.getMetaStream().getChannel().isOpen()) { purgeReason = "purging replica because its meta channel is closed."; } else if (replica.isStale()) { purgeReason = "purging replica because it is stale."; } if (purgeReason != null) { LOG.debug(this + ": " + purgeReason); purge(replica); } } String addedString = ""; boolean shouldTrimEvictionMaps = false; int newRefCount = --replica.refCount; if (newRefCount == 0) { // Close replica, since there are no remaining references to it. Preconditions.checkArgument(replica.purged, "Replica " + replica + " reached a refCount of 0 without " + "being purged"); replica.close(); } else if (newRefCount == 1) { Preconditions.checkState(null == replica.getEvictableTimeNs(), "Replica " + replica + " had a refCount higher than 1, " + "but was still evictable (evictableTimeNs = " + replica.getEvictableTimeNs() + ")"); if (!replica.purged) { // Add the replica to the end of an eviction list. // Eviction lists are sorted by time. 
if (replica.hasMmap()) { insertEvictable(System.nanoTime(), replica, evictableMmapped); addedString = "added to evictableMmapped, "; } else { insertEvictable(System.nanoTime(), replica, evictable); addedString = "added to evictable, "; } shouldTrimEvictionMaps = true; } } else { Preconditions.checkArgument(replica.refCount >= 0, "replica's refCount went negative (refCount = " + replica.refCount + " for " + replica + ")"); } if (LOG.isTraceEnabled()) { LOG.trace(this + ": unref replica " + replica + ": " + addedString + " refCount " + (newRefCount + 1) + " -> " + newRefCount + StringUtils.getStackTrace(Thread.currentThread())); } if (shouldTrimEvictionMaps) { trimEvictionMaps(); } } finally { lock.unlock(); } } /** * Demote old evictable mmaps into the regular eviction map. * * You must hold the cache lock while calling this function. * * @param now Current time in monotonic milliseconds. * @return Number of replicas demoted. */ private int demoteOldEvictableMmaped(long now) { int numDemoted = 0; boolean needMoreSpace = false; Long evictionTimeNs = Long.valueOf(0); while (true) { Entry<Long, ShortCircuitReplica> entry = evictableMmapped.ceilingEntry(evictionTimeNs); if (entry == null) break; evictionTimeNs = entry.getKey(); long evictionTimeMs = TimeUnit.MILLISECONDS.convert(evictionTimeNs, TimeUnit.NANOSECONDS); if (evictionTimeMs + maxEvictableMmapedLifespanMs >= now) { if (evictableMmapped.size() < maxEvictableMmapedSize) { break; } needMoreSpace = true; } ShortCircuitReplica replica = entry.getValue(); if (LOG.isTraceEnabled()) { String rationale = needMoreSpace ? "because we need more space" : "because it's too old"; LOG.trace("demoteOldEvictable: demoting " + replica + ": " + rationale + ": " + StringUtils.getStackTrace(Thread.currentThread())); } removeEvictable(replica, evictableMmapped); munmap(replica); insertEvictable(evictionTimeNs, replica, evictable); numDemoted++; } return numDemoted; } /** * Trim the eviction lists. 
*/ private void trimEvictionMaps() { long now = Time.monotonicNow(); demoteOldEvictableMmaped(now); while (true) { long evictableSize = evictable.size(); long evictableMmappedSize = evictableMmapped.size(); if (evictableSize + evictableMmappedSize <= maxTotalSize) { return; } ShortCircuitReplica replica; if (evictableSize == 0) { replica = evictableMmapped.firstEntry().getValue(); } else { replica = evictable.firstEntry().getValue(); } if (LOG.isTraceEnabled()) { LOG.trace(this + ": trimEvictionMaps is purging " + replica + StringUtils.getStackTrace(Thread.currentThread())); } purge(replica); } } /** * Munmap a replica, updating outstandingMmapCount. * * @param replica The replica to munmap. */ private void munmap(ShortCircuitReplica replica) { replica.munmap(); outstandingMmapCount--; } /** * Remove a replica from an evictable map. * * @param replica The replica to remove. * @return The map it was removed from. */ private String removeEvictable(ShortCircuitReplica replica) { if (replica.hasMmap()) { removeEvictable(replica, evictableMmapped); return "evictableMmapped"; } else { removeEvictable(replica, evictable); return "evictable"; } } /** * Remove a replica from an evictable map. * * @param replica The replica to remove. * @param map The map to remove it from. */ private void removeEvictable(ShortCircuitReplica replica, TreeMap<Long, ShortCircuitReplica> map) { Long evictableTimeNs = replica.getEvictableTimeNs(); Preconditions.checkNotNull(evictableTimeNs); ShortCircuitReplica removed = map.remove(evictableTimeNs); Preconditions.checkState(removed == replica, "failed to make " + replica + " unevictable"); replica.setEvictableTimeNs(null); } /** * Insert a replica into an evictable map. * * If an element already exists with this eviction time, we add a nanosecond * to it until we find an unused key. * * @param evictionTimeNs The eviction time in absolute nanoseconds. * @param replica The replica to insert. * @param map The map to insert it into. 
*/ private void insertEvictable(Long evictionTimeNs, ShortCircuitReplica replica, TreeMap<Long, ShortCircuitReplica> map) { while (map.containsKey(evictionTimeNs)) { evictionTimeNs++; } Preconditions.checkState(null == replica.getEvictableTimeNs()); replica.setEvictableTimeNs(evictionTimeNs); map.put(evictionTimeNs, replica); } /** * Purge a replica from the cache. * * This doesn't necessarily close the replica, since there may be * outstanding references to it. However, it does mean the cache won't * hand it out to anyone after this. * * You must hold the cache lock while calling this function. * * @param replica The replica being removed. */ private void purge(ShortCircuitReplica replica) { boolean removedFromInfoMap = false; String evictionMapName = null; Preconditions.checkArgument(!replica.purged); replica.purged = true; Waitable<ShortCircuitReplicaInfo> val = replicaInfoMap.get(replica.key); if (val != null) { ShortCircuitReplicaInfo info = val.getVal(); if ((info != null) && (info.getReplica() == replica)) { replicaInfoMap.remove(replica.key); removedFromInfoMap = true; } } Long evictableTimeNs = replica.getEvictableTimeNs(); if (evictableTimeNs != null) { evictionMapName = removeEvictable(replica); } if (LOG.isTraceEnabled()) { StringBuilder builder = new StringBuilder(); builder.append(this).append(": ").append(": purged "). append(replica).append(" from the cache."); if (removedFromInfoMap) { builder.append(" Removed from the replicaInfoMap."); } if (evictionMapName != null) { builder.append(" Removed from ").append(evictionMapName); } LOG.trace(builder.toString()); } unref(replica); } /** * Fetch or create a replica. * * You must hold the cache lock while calling this function. * * @param key Key to use for lookup. * @param creator Replica creator callback. Will be called without * the cache lock being held. * * @return Null if no replica could be found or created. * The replica, otherwise. 
*/ public ShortCircuitReplicaInfo fetchOrCreate(ExtendedBlockId key, ShortCircuitReplicaCreator creator) { Waitable<ShortCircuitReplicaInfo> newWaitable = null; lock.lock(); try { ShortCircuitReplicaInfo info = null; do { if (closed) { if (LOG.isTraceEnabled()) { LOG.trace(this + ": can't fetchOrCreate " + key + " because the cache is closed."); } return null; } Waitable<ShortCircuitReplicaInfo> waitable = replicaInfoMap.get(key); if (waitable != null) { try { info = fetch(key, waitable); } catch (RetriableException e) { if (LOG.isDebugEnabled()) { LOG.debug(this + ": retrying " + e.getMessage()); } continue; } } } while (false); if (info != null) return info; // We need to load the replica ourselves. newWaitable = new Waitable<ShortCircuitReplicaInfo>(lock.newCondition()); replicaInfoMap.put(key, newWaitable); } finally { lock.unlock(); } return create(key, creator, newWaitable); } /** * Fetch an existing ReplicaInfo object. * * @param key The key that we're using. * @param waitable The waitable object to wait on. * @return The existing ReplicaInfo object, or null if there is * none. * * @throws RetriableException If the caller needs to retry. */ private ShortCircuitReplicaInfo fetch(ExtendedBlockId key, Waitable<ShortCircuitReplicaInfo> waitable) throws RetriableException { // Another thread is already in the process of loading this // ShortCircuitReplica. So we simply wait for it to complete. 
ShortCircuitReplicaInfo info; try { if (LOG.isTraceEnabled()) { LOG.trace(this + ": found waitable for " + key); } info = waitable.await(); } catch (InterruptedException e) { LOG.info(this + ": interrupted while waiting for " + key); Thread.currentThread().interrupt(); throw new RetriableException("interrupted"); } if (info.getInvalidTokenException() != null) { LOG.warn(this + ": could not get " + key + " due to InvalidToken " + "exception.", info.getInvalidTokenException()); return info; } ShortCircuitReplica replica = info.getReplica(); if (replica == null) { LOG.warn(this + ": failed to get " + key); return info; } if (replica.purged) { // Ignore replicas that have already been purged from the cache. throw new RetriableException("Ignoring purged replica " + replica + ". Retrying."); } // Check if the replica is stale before using it. // If it is, purge it and retry. if (replica.isStale()) { LOG.info(this + ": got stale replica " + replica + ". Removing " + "this replica from the replicaInfoMap and retrying."); // Remove the cache's reference to the replica. This may or may not // trigger a close. purge(replica); throw new RetriableException("ignoring stale replica " + replica); } ref(replica); return info; } private ShortCircuitReplicaInfo create(ExtendedBlockId key, ShortCircuitReplicaCreator creator, Waitable<ShortCircuitReplicaInfo> newWaitable) { // Handle loading a new replica. ShortCircuitReplicaInfo info = null; try { if (LOG.isTraceEnabled()) { LOG.trace(this + ": loading " + key); } info = creator.createShortCircuitReplicaInfo(); } catch (RuntimeException e) { LOG.warn(this + ": failed to load " + key, e); } if (info == null) info = new ShortCircuitReplicaInfo(); lock.lock(); try { if (info.getReplica() != null) { // On success, make sure the cache cleaner thread is running. 
if (LOG.isTraceEnabled()) { LOG.trace(this + ": successfully loaded " + info.getReplica()); } startCacheCleanerThreadIfNeeded(); // Note: new ShortCircuitReplicas start with a refCount of 2, // indicating that both this cache and whoever requested the // creation of the replica hold a reference. So we don't need // to increment the reference count here. } else { // On failure, remove the waitable from the replicaInfoMap. Waitable<ShortCircuitReplicaInfo> waitableInMap = replicaInfoMap.get(key); if (waitableInMap == newWaitable) replicaInfoMap.remove(key); if (info.getInvalidTokenException() != null) { LOG.warn(this + ": could not load " + key + " due to InvalidToken " + "exception.", info.getInvalidTokenException()); } else { LOG.warn(this + ": failed to load " + key); } } newWaitable.provide(info); } finally { lock.unlock(); } return info; } private void startCacheCleanerThreadIfNeeded() { if (cacheCleaner == null) { cacheCleaner = new CacheCleaner(); long rateMs = cacheCleaner.getRateInMs(); ScheduledFuture<?> future = cleanerExecutor.scheduleAtFixedRate(cacheCleaner, rateMs, rateMs, TimeUnit.MILLISECONDS); cacheCleaner.setFuture(future); if (LOG.isDebugEnabled()) { LOG.debug(this + ": starting cache cleaner thread which will run " + "every " + rateMs + " ms"); } } } ClientMmap getOrCreateClientMmap(ShortCircuitReplica replica, boolean anchored) { Condition newCond; lock.lock(); try { while (replica.mmapData != null) { if (replica.mmapData instanceof MappedByteBuffer) { ref(replica); MappedByteBuffer mmap = (MappedByteBuffer)replica.mmapData; return new ClientMmap(replica, mmap, anchored); } else if (replica.mmapData instanceof Long) { long lastAttemptTimeMs = (Long)replica.mmapData; long delta = Time.monotonicNow() - lastAttemptTimeMs; if (delta < mmapRetryTimeoutMs) { if (LOG.isTraceEnabled()) { LOG.trace(this + ": can't create client mmap for " + replica + " because we failed to " + "create one just " + delta + "ms ago."); } return null; } if 
(LOG.isTraceEnabled()) { LOG.trace(this + ": retrying client mmap for " + replica + ", " + delta + " ms after the previous failure."); } } else if (replica.mmapData instanceof Condition) { Condition cond = (Condition)replica.mmapData; cond.awaitUninterruptibly(); } else { Preconditions.checkState(false, "invalid mmapData type " + replica.mmapData.getClass().getName()); } } newCond = lock.newCondition(); replica.mmapData = newCond; } finally { lock.unlock(); } MappedByteBuffer map = replica.loadMmapInternal(); lock.lock(); try { if (map == null) { replica.mmapData = Long.valueOf(Time.monotonicNow()); newCond.signalAll(); return null; } else { outstandingMmapCount++; replica.mmapData = map; ref(replica); newCond.signalAll(); return new ClientMmap(replica, map, anchored); } } finally { lock.unlock(); } } /** * Close the cache and free all associated resources. */ @Override public void close() { try { lock.lock(); if (closed) return; closed = true; LOG.info(this + ": closing"); maxNonMmappedEvictableLifespanMs = 0; maxEvictableMmapedSize = 0; // Close and join cacheCleaner thread. IOUtils.cleanup(LOG, cacheCleaner); // Purge all replicas. 
while (true) { Entry<Long, ShortCircuitReplica> entry = evictable.firstEntry(); if (entry == null) break; purge(entry.getValue()); } while (true) { Entry<Long, ShortCircuitReplica> entry = evictableMmapped.firstEntry(); if (entry == null) break; purge(entry.getValue()); } } finally { lock.unlock(); } IOUtils.cleanup(LOG, shmManager); } @VisibleForTesting // ONLY for testing public interface CacheVisitor { void visit(int numOutstandingMmaps, Map<ExtendedBlockId, ShortCircuitReplica> replicas, Map<ExtendedBlockId, InvalidToken> failedLoads, Map<Long, ShortCircuitReplica> evictable, Map<Long, ShortCircuitReplica> evictableMmapped); } @VisibleForTesting // ONLY for testing public void accept(CacheVisitor visitor) { lock.lock(); try { Map<ExtendedBlockId, ShortCircuitReplica> replicas = new HashMap<ExtendedBlockId, ShortCircuitReplica>(); Map<ExtendedBlockId, InvalidToken> failedLoads = new HashMap<ExtendedBlockId, InvalidToken>(); for (Entry<ExtendedBlockId, Waitable<ShortCircuitReplicaInfo>> entry : replicaInfoMap.entrySet()) { Waitable<ShortCircuitReplicaInfo> waitable = entry.getValue(); if (waitable.hasVal()) { if (waitable.getVal().getReplica() != null) { replicas.put(entry.getKey(), waitable.getVal().getReplica()); } else { // The exception may be null here, indicating a failed load that // isn't the result of an invalid block token. failedLoads.put(entry.getKey(), waitable.getVal().getInvalidTokenException()); } } } if (LOG.isDebugEnabled()) { StringBuilder builder = new StringBuilder(); builder.append("visiting ").append(visitor.getClass().getName()). append("with outstandingMmapCount=").append(outstandingMmapCount). 
append(", replicas="); String prefix = ""; for (Entry<ExtendedBlockId, ShortCircuitReplica> entry : replicas.entrySet()) { builder.append(prefix).append(entry.getValue()); prefix = ","; } prefix = ""; builder.append(", failedLoads="); for (Entry<ExtendedBlockId, InvalidToken> entry : failedLoads.entrySet()) { builder.append(prefix).append(entry.getValue()); prefix = ","; } prefix = ""; builder.append(", evictable="); for (Entry<Long, ShortCircuitReplica> entry : evictable.entrySet()) { builder.append(prefix).append(entry.getKey()). append(":").append(entry.getValue()); prefix = ","; } prefix = ""; builder.append(", evictableMmapped="); for (Entry<Long, ShortCircuitReplica> entry : evictableMmapped.entrySet()) { builder.append(prefix).append(entry.getKey()). append(":").append(entry.getValue()); prefix = ","; } LOG.debug(builder.toString()); } visitor.visit(outstandingMmapCount, replicas, failedLoads, evictable, evictableMmapped); } finally { lock.unlock(); } } @Override public String toString() { return "ShortCircuitCache(0x" + Integer.toHexString(System.identityHashCode(this)) + ")"; } /** * Allocate a new shared memory slot. * * @param datanode The datanode to allocate a shm slot with. * @param peer A peer connected to the datanode. * @param usedPeer Will be set to true if we use up the provided peer. * @param blockId The block id and block pool id of the block we're * allocating this slot for. * @param clientName The name of the DFSClient allocating the shared * memory. * @return Null if short-circuit shared memory is disabled; * a short-circuit memory slot otherwise. * @throws IOException An exception if there was an error talking to * the datanode. */ public Slot allocShmSlot(DatanodeInfo datanode, DomainPeer peer, MutableBoolean usedPeer, ExtendedBlockId blockId, String clientName) throws IOException { if (shmManager != null) { return shmManager.allocSlot(datanode, peer, usedPeer, blockId, clientName); } else { return null; } } /** * Free a slot immediately. 
* * ONLY use this if the DataNode is not yet aware of the slot. * * @param slot The slot to free. */ public void freeSlot(Slot slot) { Preconditions.checkState(shmManager != null); slot.makeInvalid(); shmManager.freeSlot(slot); } /** * Schedule a shared memory slot to be released. * * @param slot The slot to release. */ public void scheduleSlotReleaser(Slot slot) { Preconditions.checkState(shmManager != null); releaserExecutor.execute(new SlotReleaser(slot)); } @VisibleForTesting public DfsClientShmManager getDfsClientShmManager() { return shmManager; } }
apache/pinot
38,141
pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/CaseTransformFunction.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pinot.core.operator.transform.function; import com.google.common.base.Preconditions; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.commons.lang3.tuple.Pair; import org.apache.pinot.core.operator.ColumnContext; import org.apache.pinot.core.operator.blocks.ValueBlock; import org.apache.pinot.core.operator.transform.TransformResultMetadata; import org.apache.pinot.spi.data.FieldSpec.DataType; import org.apache.pinot.spi.utils.BytesUtils; import org.apache.pinot.spi.utils.CommonConstants.NullValuePlaceHolder; import org.apache.pinot.spi.utils.TimestampUtils; import org.roaringbitmap.RoaringBitmap; /** * The <code>CaseTransformFunction</code> class implements the CASE-WHEN-THEN-ELSE transformation. 
* <p> * The SQL Syntax is: CASE WHEN condition1 THEN result1 WHEN condition2 THEN result2 WHEN conditionN THEN resultN ELSE * result END; * <p> * Usage: case(${WHEN_STATEMENT_1}, ${THEN_EXPRESSION_1}, ..., ${WHEN_STATEMENT_N}, ${THEN_EXPRESSION_N}, ..., * ${ELSE_EXPRESSION}) * <p> * There are 2 * N + 1 arguments: * <code>WHEN_STATEMENT_$i</code> is a <code>BinaryOperatorTransformFunction</code> represents <code>condition$i</code> * <code>THEN_EXPRESSION_$i</code> is a <code>TransformFunction</code> represents <code>result$i</code> * <code>ELSE_EXPRESSION</code> is a <code>TransformFunction</code> represents <code>result</code> * <p> * ELSE_EXPRESSION can be omitted. When none of when statements is evaluated to be true, and there is no else * expression, we output null. Note that when statement is considered as false if it is evaluated to be null. * <p> * PostgreSQL documentation: <a href="https://www.postgresql.org/docs/current/typeconv-union-case.html">CASE</a> */ public class CaseTransformFunction extends ComputeDifferentlyWhenNullHandlingEnabledTransformFunction { public static final String FUNCTION_NAME = "case"; private List<TransformFunction> _whenStatements = new ArrayList<>(); private List<TransformFunction> _thenStatements = new ArrayList<>(); private TransformFunction _elseStatement; private TransformResultMetadata _resultMetadata; private boolean[] _computeThenStatements; private int[] _selectedResults; @Override public String getName() { return FUNCTION_NAME; } @Override public void init(List<TransformFunction> arguments, Map<String, ColumnContext> columnContextMap, boolean nullHandlingEnabled) { super.init(arguments, columnContextMap, nullHandlingEnabled); // Check that there are more than 2 arguments // Else statement can be omitted. 
if (arguments.size() < 2) { throw new IllegalArgumentException("At least two arguments are required for CASE-WHEN function"); } int numWhenStatements = arguments.size() / 2; _whenStatements = new ArrayList<>(numWhenStatements); _thenStatements = new ArrayList<>(numWhenStatements); // Alternating WHEN and THEN clause, last one ELSE for (int i = 0; i < numWhenStatements; i++) { _whenStatements.add(arguments.get(i * 2)); _thenStatements.add(arguments.get(i * 2 + 1)); } if (arguments.size() % 2 != 0 && !isNullLiteral(arguments.get(arguments.size() - 1))) { _elseStatement = arguments.get(arguments.size() - 1); } _resultMetadata = new TransformResultMetadata(calculateResultType(), true, false); _computeThenStatements = new boolean[numWhenStatements]; } private boolean isNullLiteral(TransformFunction function) { return function instanceof LiteralTransformFunction && ((LiteralTransformFunction) function).isNull(); } private DataType calculateResultType() { MutablePair<DataType, List<String>> typeAndUnresolvedLiterals = new MutablePair<>(DataType.UNKNOWN, new ArrayList<>()); for (TransformFunction thenStatement : _thenStatements) { upcast(typeAndUnresolvedLiterals, thenStatement); } if (_elseStatement != null) { upcast(typeAndUnresolvedLiterals, _elseStatement); } DataType dataType = typeAndUnresolvedLiterals.getLeft(); // If all inputs are of type UNKNOWN, resolve as type STRING return dataType != DataType.UNKNOWN ? 
dataType : DataType.STRING; } private void upcast(MutablePair<DataType, List<String>> currentTypeAndUnresolvedLiterals, TransformFunction newFunction) { TransformResultMetadata newMetadata = newFunction.getResultMetadata(); Preconditions.checkArgument(newMetadata.isSingleValue(), "Unsupported multi-value expression in THEN/ELSE clause"); DataType newType = newMetadata.getDataType(); if (newType == DataType.UNKNOWN) { return; } DataType currentType = currentTypeAndUnresolvedLiterals.getLeft(); if (currentType == newType) { return; } List<String> unresolvedLiterals = currentTypeAndUnresolvedLiterals.getRight(); // Treat string literals as UNKNOWN type. Resolve them when we get a non-UNKNOWN type. boolean isNewFunctionStringLiteral = newFunction instanceof LiteralTransformFunction && newType == DataType.STRING; if (currentType == DataType.UNKNOWN) { if (isNewFunctionStringLiteral) { unresolvedLiterals.add(((LiteralTransformFunction) newFunction).getStringLiteral()); } else { currentTypeAndUnresolvedLiterals.setLeft(newType); for (String unresolvedLiteral : unresolvedLiterals) { checkLiteral(newType, unresolvedLiteral); } unresolvedLiterals.clear(); } } else { assert unresolvedLiterals.isEmpty(); if (isNewFunctionStringLiteral) { checkLiteral(currentType, ((LiteralTransformFunction) newFunction).getStringLiteral()); } else { // Only allow upcast from numeric to numeric: INT -> LONG -> FLOAT -> DOUBLE -> BIG_DECIMAL Preconditions.checkArgument(currentType.isNumeric() && newType.isNumeric(), "Cannot upcast from %s to %s", currentType, newType); if (newType.ordinal() > currentType.ordinal()) { currentTypeAndUnresolvedLiterals.setLeft(newType); } } } } private void checkLiteral(DataType dataType, String literal) { switch (dataType) { case INT: try { Integer.parseInt(literal); } catch (Exception e) { throw new IllegalArgumentException("Invalid literal: " + literal + " for INT"); } break; case LONG: try { Long.parseLong(literal); } catch (Exception e) { throw new 
IllegalArgumentException("Invalid literal: " + literal + " for LONG"); } break; case FLOAT: try { Float.parseFloat(literal); } catch (Exception e) { throw new IllegalArgumentException("Invalid literal: " + literal + " for FLOAT"); } break; case DOUBLE: try { Double.parseDouble(literal); } catch (Exception e) { throw new IllegalArgumentException("Invalid literal: " + literal + " for DOUBLE"); } break; case BIG_DECIMAL: try { new BigDecimal(literal); } catch (Exception e) { throw new IllegalArgumentException("Invalid literal: " + literal + " for BIG_DECIMAL"); } break; case BOOLEAN: Preconditions.checkArgument( literal.equalsIgnoreCase("true") || literal.equalsIgnoreCase("false") || literal.equals("1") || literal.equals("0"), "Invalid literal: %s for BOOLEAN", literal); break; case TIMESTAMP: try { TimestampUtils.toTimestamp(literal); } catch (Exception e) { throw new IllegalArgumentException("Invalid literal: " + literal + " for TIMESTAMP"); } break; case STRING: case JSON: break; case BYTES: try { BytesUtils.toBytes(literal); } catch (Exception e) { throw new IllegalArgumentException("Invalid literal: " + literal + " for BYTES"); } break; default: throw new IllegalStateException("Unsupported data type: " + dataType); } } @Override public TransformResultMetadata getResultMetadata() { return _resultMetadata; } /** * Evaluate the ValueBlock for the WHEN statements, returns an array with the index(1 to N) of matched WHEN clause -1 * means there is no match. 
*/ private int[] getSelectedArray(ValueBlock valueBlock, boolean nullHandlingEnabled) { int numDocs = valueBlock.getNumDocs(); if (_selectedResults == null || _selectedResults.length < numDocs) { _selectedResults = new int[numDocs]; } Arrays.fill(_selectedResults, -1); Arrays.fill(_computeThenStatements, false); BitSet unselectedDocs = new BitSet(); unselectedDocs.set(0, numDocs); int numWhenStatements = _whenStatements.size(); for (int i = 0; i < numWhenStatements; i++) { TransformFunction whenStatement = _whenStatements.get(i); int[] conditions = getWhenConditions(whenStatement, valueBlock, nullHandlingEnabled); for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { if (conditions[docId] == 1) { unselectedDocs.clear(docId); _selectedResults[docId] = i; } } if (unselectedDocs.isEmpty()) { break; } } // try to prune clauses now for (int i = 0; i < numDocs; i++) { if (_selectedResults[i] != -1) { _computeThenStatements[_selectedResults[i]] = true; } } return _selectedResults; } // Returns an array of valueBlock length to indicate whether a row is selected or not. // When nullHandlingEnabled is set to true, we also check whether the row is null and set to false if null. 
private static int[] getWhenConditions(TransformFunction whenStatement, ValueBlock valueBlock, boolean nullHandlingEnabled) { if (!nullHandlingEnabled) { return whenStatement.transformToIntValuesSV(valueBlock); } int[] intResult = whenStatement.transformToIntValuesSV(valueBlock); RoaringBitmap bitmap = whenStatement.getNullBitmap(valueBlock); if (bitmap != null) { for (int i : bitmap) { intResult[i] = 0; } } return intResult; } @Override protected int[] transformToIntValuesSVUsingValue(ValueBlock valueBlock) { int[] selected = getSelectedArray(valueBlock, false); int numDocs = valueBlock.getNumDocs(); initIntValuesSV(numDocs); int numThenStatements = _thenStatements.size(); BitSet unselectedDocs = new BitSet(); unselectedDocs.set(0, numDocs); Map<Integer, int[]> thenStatementsIndexToValues = new HashMap<>(); for (int i = 0; i < numThenStatements; i++) { if (_computeThenStatements[i]) { thenStatementsIndexToValues.put(i, _thenStatements.get(i).transformToIntValuesSV(valueBlock)); } } for (int docId = 0; docId < numDocs; docId++) { if (selected[docId] >= 0) { _intValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId]; unselectedDocs.clear(docId); } } if (!unselectedDocs.isEmpty()) { if (_elseStatement == null) { for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _intValuesSV[docId] = NullValuePlaceHolder.INT; } } else { int[] intValues = _elseStatement.transformToIntValuesSV(valueBlock); for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _intValuesSV[docId] = intValues[docId]; } } } return _intValuesSV; } @Override protected int[] transformToIntValuesSVUsingValueAndNull(ValueBlock valueBlock) { final RoaringBitmap bitmap = new RoaringBitmap(); int[] selected = getSelectedArray(valueBlock, true); int numDocs = valueBlock.getNumDocs(); initIntValuesSV(numDocs); int numThenStatements = _thenStatements.size(); BitSet unselectedDocs = new 
BitSet(); unselectedDocs.set(0, numDocs); Map<Integer, Pair<int[], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>(); for (int i = 0; i < numThenStatements; i++) { if (_computeThenStatements[i]) { thenStatementsIndexToValues.put(i, ImmutablePair.of(_thenStatements.get(i).transformToIntValuesSV(valueBlock), _thenStatements.get(i).getNullBitmap(valueBlock))); } } for (int docId = 0; docId < numDocs; docId++) { if (selected[docId] >= 0) { Pair<int[], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]); _intValuesSV[docId] = nullValuePair.getLeft()[docId]; RoaringBitmap nullBitmap = nullValuePair.getRight(); if (nullBitmap != null && nullBitmap.contains(docId)) { bitmap.add(docId); } unselectedDocs.clear(docId); if (unselectedDocs.isEmpty()) { break; } } } if (!unselectedDocs.isEmpty()) { if (_elseStatement == null) { for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _intValuesSV[docId] = NullValuePlaceHolder.INT; bitmap.add(docId); } } else { int[] intValues = _elseStatement.transformToIntValuesSV(valueBlock); RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock); for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _intValuesSV[docId] = intValues[docId]; if (nullBitmap != null && nullBitmap.contains(docId)) { bitmap.add(docId); } } } } return _intValuesSV; } @Override protected long[] transformToLongValuesSVUsingValue(ValueBlock valueBlock) { int[] selected = getSelectedArray(valueBlock, false); int numDocs = valueBlock.getNumDocs(); initLongValuesSV(numDocs); int numThenStatements = _thenStatements.size(); BitSet unselectedDocs = new BitSet(); unselectedDocs.set(0, numDocs); Map<Integer, long[]> thenStatementsIndexToValues = new HashMap<>(); for (int i = 0; i < numThenStatements; i++) { if (_computeThenStatements[i]) { thenStatementsIndexToValues.put(i, 
_thenStatements.get(i).transformToLongValuesSV(valueBlock)); } } for (int docId = 0; docId < numDocs; docId++) { if (selected[docId] >= 0) { _longValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId]; unselectedDocs.clear(docId); } } if (!unselectedDocs.isEmpty()) { if (_elseStatement == null) { for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _longValuesSV[docId] = NullValuePlaceHolder.LONG; } } else { long[] longValues = _elseStatement.transformToLongValuesSV(valueBlock); for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _longValuesSV[docId] = longValues[docId]; } } } return _longValuesSV; } @Override protected long[] transformToLongValuesSVUsingValueAndNull(ValueBlock valueBlock) { final RoaringBitmap bitmap = new RoaringBitmap(); int[] selected = getSelectedArray(valueBlock, true); int numDocs = valueBlock.getNumDocs(); initLongValuesSV(numDocs); int numThenStatements = _thenStatements.size(); BitSet unselectedDocs = new BitSet(); unselectedDocs.set(0, numDocs); Map<Integer, Pair<long[], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>(); for (int i = 0; i < numThenStatements; i++) { if (_computeThenStatements[i]) { thenStatementsIndexToValues.put(i, ImmutablePair.of(_thenStatements.get(i).transformToLongValuesSV(valueBlock), _thenStatements.get(i).getNullBitmap(valueBlock))); } } for (int docId = 0; docId < numDocs; docId++) { if (selected[docId] >= 0) { Pair<long[], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]); _longValuesSV[docId] = nullValuePair.getLeft()[docId]; RoaringBitmap nullBitmap = nullValuePair.getRight(); if (nullBitmap != null && nullBitmap.contains(docId)) { bitmap.add(docId); } unselectedDocs.clear(docId); if (unselectedDocs.isEmpty()) { break; } } } if (!unselectedDocs.isEmpty()) { if (_elseStatement == null) { for (int docId = unselectedDocs.nextSetBit(0); docId >= 
0; docId = unselectedDocs.nextSetBit(docId + 1)) { _longValuesSV[docId] = NullValuePlaceHolder.LONG; bitmap.add(docId); } } else { long[] longValues = _elseStatement.transformToLongValuesSV(valueBlock); RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock); for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _longValuesSV[docId] = longValues[docId]; if (nullBitmap != null && nullBitmap.contains(docId)) { bitmap.add(docId); } } } } return _longValuesSV; } @Override protected float[] transformToFloatValuesSVUsingValue(ValueBlock valueBlock) { int[] selected = getSelectedArray(valueBlock, false); int numDocs = valueBlock.getNumDocs(); initFloatValuesSV(numDocs); int numThenStatements = _thenStatements.size(); BitSet unselectedDocs = new BitSet(); unselectedDocs.set(0, numDocs); Map<Integer, float[]> thenStatementsIndexToValues = new HashMap<>(); for (int i = 0; i < numThenStatements; i++) { if (_computeThenStatements[i]) { thenStatementsIndexToValues.put(i, _thenStatements.get(i).transformToFloatValuesSV(valueBlock)); } } for (int docId = 0; docId < numDocs; docId++) { if (selected[docId] >= 0) { _floatValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId]; unselectedDocs.clear(docId); } } if (!unselectedDocs.isEmpty()) { if (_elseStatement == null) { for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _floatValuesSV[docId] = NullValuePlaceHolder.FLOAT; } } else { float[] floatValues = _elseStatement.transformToFloatValuesSV(valueBlock); for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) { _floatValuesSV[docId] = floatValues[docId]; } } } return _floatValuesSV; } @Override protected float[] transformToFloatValuesSVUsingValueAndNull(ValueBlock valueBlock) { final RoaringBitmap bitmap = new RoaringBitmap(); int[] selected = getSelectedArray(valueBlock, true); int numDocs = 
valueBlock.getNumDocs();
    initFloatValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    // Docs not yet claimed by any THEN branch; drained as branches match.
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    // Evaluate only the THEN branches that can actually be selected for this block.
    Map<Integer, Pair<float[], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, ImmutablePair.of(
            _thenStatements.get(i).transformToFloatValuesSV(valueBlock),
            _thenStatements.get(i).getNullBitmap(valueBlock)));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        Pair<float[], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]);
        _floatValuesSV[docId] = nullValuePair.getLeft()[docId];
        RoaringBitmap nullBitmap = nullValuePair.getRight();
        if (nullBitmap != null && nullBitmap.contains(docId)) {
          // NOTE(review): `bitmap` is populated in the ...UsingValueAndNull methods but never
          // returned or stored; null tracking appears to be redone in getNullBitmap(). Confirm
          // before removing.
          bitmap.add(docId);
        }
        unselectedDocs.clear(docId);
        if (unselectedDocs.isEmpty()) {
          break;
        }
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        // No ELSE branch: unmatched docs get the null placeholder and are marked null.
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _floatValuesSV[docId] = NullValuePlaceHolder.FLOAT;
          bitmap.add(docId);
        }
      } else {
        float[] floatValues = _elseStatement.transformToFloatValuesSV(valueBlock);
        RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _floatValuesSV[docId] = floatValues[docId];
          if (nullBitmap != null && nullBitmap.contains(docId)) {
            bitmap.add(docId);
          }
        }
      }
    }
    return _floatValuesSV;
  }

  // Fills _doubleValuesSV per doc from the selected THEN branch, or from ELSE / the double
  // null placeholder for docs no WHEN matched. Null bitmaps are ignored in this variant.
  @Override
  protected double[] transformToDoubleValuesSVUsingValue(ValueBlock valueBlock) {
    int[] selected = getSelectedArray(valueBlock, false);
    int numDocs = valueBlock.getNumDocs();
    initDoubleValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, double[]> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, _thenStatements.get(i).transformToDoubleValuesSV(valueBlock));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        _doubleValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId];
        unselectedDocs.clear(docId);
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _doubleValuesSV[docId] = NullValuePlaceHolder.DOUBLE;
        }
      } else {
        double[] doubleValues = _elseStatement.transformToDoubleValuesSV(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _doubleValuesSV[docId] = doubleValues[docId];
        }
      }
    }
    return _doubleValuesSV;
  }

  // Same as the value-only double variant, but also consults each branch's null bitmap.
  @Override
  protected double[] transformToDoubleValuesSVUsingValueAndNull(ValueBlock valueBlock) {
    final RoaringBitmap bitmap = new RoaringBitmap();
    int[] selected = getSelectedArray(valueBlock, true);
    int numDocs = valueBlock.getNumDocs();
    initDoubleValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, Pair<double[], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, ImmutablePair.of(
            _thenStatements.get(i).transformToDoubleValuesSV(valueBlock),
            _thenStatements.get(i).getNullBitmap(valueBlock)));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        Pair<double[], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]);
        _doubleValuesSV[docId] = nullValuePair.getLeft()[docId];
        RoaringBitmap nullBitmap = nullValuePair.getRight();
        if (nullBitmap != null && nullBitmap.contains(docId)) {
          bitmap.add(docId);
        }
        unselectedDocs.clear(docId);
        if (unselectedDocs.isEmpty()) {
          break;
        }
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _doubleValuesSV[docId] = NullValuePlaceHolder.DOUBLE;
          bitmap.add(docId);
        }
      } else {
        double[] doubleValues = _elseStatement.transformToDoubleValuesSV(valueBlock);
        RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _doubleValuesSV[docId] = doubleValues[docId];
          if (nullBitmap != null && nullBitmap.contains(docId)) {
            bitmap.add(docId);
          }
        }
      }
    }
    return _doubleValuesSV;
  }

  // BigDecimal variant, value-only (no null tracking).
  @Override
  protected BigDecimal[] transformToBigDecimalValuesSVUsingValue(ValueBlock valueBlock) {
    int[] selected = getSelectedArray(valueBlock, false);
    int numDocs = valueBlock.getNumDocs();
    initBigDecimalValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, BigDecimal[]> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, _thenStatements.get(i).transformToBigDecimalValuesSV(valueBlock));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        _bigDecimalValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId];
        unselectedDocs.clear(docId);
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bigDecimalValuesSV[docId] = NullValuePlaceHolder.BIG_DECIMAL;
        }
      } else {
        BigDecimal[] bigDecimalValues = _elseStatement.transformToBigDecimalValuesSV(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bigDecimalValuesSV[docId] = bigDecimalValues[docId];
        }
      }
    }
    return _bigDecimalValuesSV;
  }

  // BigDecimal variant with null tracking.
  @Override
  protected BigDecimal[] transformToBigDecimalValuesSVUsingValueAndNull(ValueBlock valueBlock) {
    final RoaringBitmap bitmap = new RoaringBitmap();
    int[] selected = getSelectedArray(valueBlock, true);
    int numDocs = valueBlock.getNumDocs();
    initBigDecimalValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, Pair<BigDecimal[], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, ImmutablePair.of(
            _thenStatements.get(i).transformToBigDecimalValuesSV(valueBlock),
            _thenStatements.get(i).getNullBitmap(valueBlock)));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        Pair<BigDecimal[], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]);
        _bigDecimalValuesSV[docId] = nullValuePair.getLeft()[docId];
        RoaringBitmap nullBitmap = nullValuePair.getRight();
        if (nullBitmap != null && nullBitmap.contains(docId)) {
          bitmap.add(docId);
        }
        unselectedDocs.clear(docId);
        if (unselectedDocs.isEmpty()) {
          break;
        }
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bigDecimalValuesSV[docId] = NullValuePlaceHolder.BIG_DECIMAL;
          bitmap.add(docId);
        }
      } else {
        BigDecimal[] bigDecimalValues = _elseStatement.transformToBigDecimalValuesSV(valueBlock);
        RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bigDecimalValuesSV[docId] = bigDecimalValues[docId];
          if (nullBitmap != null && nullBitmap.contains(docId)) {
            bitmap.add(docId);
          }
        }
      }
    }
    return _bigDecimalValuesSV;
  }

  // String variant, value-only (no null tracking).
  @Override
  protected String[] transformToStringValuesSVUsingValue(ValueBlock valueBlock) {
    int[] selected = getSelectedArray(valueBlock, false);
    int numDocs = valueBlock.getNumDocs();
    initStringValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, String[]> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, _thenStatements.get(i).transformToStringValuesSV(valueBlock));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        _stringValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId];
        unselectedDocs.clear(docId);
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _stringValuesSV[docId] = NullValuePlaceHolder.STRING;
        }
      } else {
        String[] stringValues = _elseStatement.transformToStringValuesSV(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _stringValuesSV[docId] = stringValues[docId];
        }
      }
    }
    return _stringValuesSV;
  }

  // String variant with null tracking.
  @Override
  protected String[] transformToStringValuesSVUsingValueAndNull(ValueBlock valueBlock) {
    final RoaringBitmap bitmap = new RoaringBitmap();
    int[] selected = getSelectedArray(valueBlock, true);
    int numDocs = valueBlock.getNumDocs();
    initStringValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, Pair<String[], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, ImmutablePair.of(
            _thenStatements.get(i).transformToStringValuesSV(valueBlock),
            _thenStatements.get(i).getNullBitmap(valueBlock)));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        Pair<String[], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]);
        _stringValuesSV[docId] = nullValuePair.getLeft()[docId];
        RoaringBitmap nullBitmap = nullValuePair.getRight();
        if (nullBitmap != null && nullBitmap.contains(docId)) {
          bitmap.add(docId);
        }
        unselectedDocs.clear(docId);
        if (unselectedDocs.isEmpty()) {
          break;
        }
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _stringValuesSV[docId] = NullValuePlaceHolder.STRING;
          bitmap.add(docId);
        }
      } else {
        String[] stringValues = _elseStatement.transformToStringValuesSV(valueBlock);
        RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _stringValuesSV[docId] = stringValues[docId];
          if (nullBitmap != null && nullBitmap.contains(docId)) {
            bitmap.add(docId);
          }
        }
      }
    }
    return _stringValuesSV;
  }

  // Bytes variant, value-only (no null tracking).
  @Override
  protected byte[][] transformToBytesValuesSVUsingValue(ValueBlock valueBlock) {
    int[] selected = getSelectedArray(valueBlock, false);
    int numDocs = valueBlock.getNumDocs();
    initBytesValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, byte[][]> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, _thenStatements.get(i).transformToBytesValuesSV(valueBlock));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        _bytesValuesSV[docId] = thenStatementsIndexToValues.get(selected[docId])[docId];
        unselectedDocs.clear(docId);
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bytesValuesSV[docId] = NullValuePlaceHolder.BYTES;
        }
      } else {
        byte[][] bytesValues = _elseStatement.transformToBytesValuesSV(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bytesValuesSV[docId] = bytesValues[docId];
        }
      }
    }
    return _bytesValuesSV;
  }

  // Bytes variant with null tracking.
  @Override
  protected byte[][] transformToBytesValuesSVUsingValueAndNull(ValueBlock valueBlock) {
    final RoaringBitmap bitmap = new RoaringBitmap();
    int[] selected = getSelectedArray(valueBlock, true);
    int numDocs = valueBlock.getNumDocs();
    // BUG FIX: was initStringValuesSV(numDocs) — a copy/paste error from the String variant.
    // This method fills _bytesValuesSV, so the bytes buffer must be initialized here.
    initBytesValuesSV(numDocs);
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    Map<Integer, Pair<byte[][], RoaringBitmap>> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, ImmutablePair.of(
            _thenStatements.get(i).transformToBytesValuesSV(valueBlock),
            _thenStatements.get(i).getNullBitmap(valueBlock)));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        Pair<byte[][], RoaringBitmap> nullValuePair = thenStatementsIndexToValues.get(selected[docId]);
        _bytesValuesSV[docId] = nullValuePair.getLeft()[docId];
        RoaringBitmap nullBitmap = nullValuePair.getRight();
        if (nullBitmap != null && nullBitmap.contains(docId)) {
          bitmap.add(docId);
        }
        unselectedDocs.clear(docId);
        if (unselectedDocs.isEmpty()) {
          break;
        }
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bytesValuesSV[docId] = NullValuePlaceHolder.BYTES;
          bitmap.add(docId);
        }
      } else {
        byte[][] bytesValues = _elseStatement.transformToBytesValuesSV(valueBlock);
        RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          _bytesValuesSV[docId] = bytesValues[docId];
          if (nullBitmap != null && nullBitmap.contains(docId)) {
            bitmap.add(docId);
          }
        }
      }
    }
    return _bytesValuesSV;
  }

  // Aggregates the null bitmap for the whole CASE expression: a doc is null when its selected
  // THEN branch marks it null, when no branch matched and there is no ELSE, or when the ELSE
  // branch marks it null. Returns null (not an empty bitmap) when no doc is null.
  @Override
  public RoaringBitmap getNullBitmap(ValueBlock valueBlock) {
    int[] selected = getSelectedArray(valueBlock, true);
    int numDocs = valueBlock.getNumDocs();
    int numThenStatements = _thenStatements.size();
    BitSet unselectedDocs = new BitSet();
    unselectedDocs.set(0, numDocs);
    final RoaringBitmap bitmap = new RoaringBitmap();
    Map<Integer, RoaringBitmap> thenStatementsIndexToValues = new HashMap<>();
    for (int i = 0; i < numThenStatements; i++) {
      if (_computeThenStatements[i]) {
        thenStatementsIndexToValues.put(i, _thenStatements.get(i).getNullBitmap(valueBlock));
      }
    }
    for (int docId = 0; docId < numDocs; docId++) {
      if (selected[docId] >= 0) {
        RoaringBitmap nullBitmap = thenStatementsIndexToValues.get(selected[docId]);
        if (nullBitmap != null && nullBitmap.contains(docId)) {
          bitmap.add(docId);
        }
        unselectedDocs.clear(docId);
      }
    }
    if (!unselectedDocs.isEmpty()) {
      if (_elseStatement == null) {
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          bitmap.add(docId);
        }
      } else {
        RoaringBitmap nullBitmap = _elseStatement.getNullBitmap(valueBlock);
        for (int docId = unselectedDocs.nextSetBit(0); docId >= 0; docId = unselectedDocs.nextSetBit(docId + 1)) {
          if (nullBitmap != null && nullBitmap.contains(docId)) {
            bitmap.add(docId);
          }
        }
      }
    }
    if (bitmap.isEmpty()) {
      return null;
    }
    return bitmap;
  }
}
googleapis/google-cloud-java
37,854
java-shopping-merchant-datasources/proto-google-shopping-merchant-datasources-v1beta/src/main/java/com/google/shopping/merchant/datasources/v1beta/UpdateDataSourceRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/shopping/merchant/datasources/v1beta/datasources.proto

// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.datasources.v1beta;

/**
 * Request message for the UpdateDataSource method.
 *
 * <p>Protobuf type {@code google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest}
 *
 * <p>NOTE(review): protoc-generated code — formatting and comments reconstructed only; do not
 * hand-edit logic, regenerate from the .proto instead.
 */
public final class UpdateDataSourceRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest)
    UpdateDataSourceRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateDataSourceRequest.newBuilder() to construct.
  private UpdateDataSourceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateDataSourceRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateDataSourceRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.datasources.v1beta.DataSourcesProto
        .internal_static_google_shopping_merchant_datasources_v1beta_UpdateDataSourceRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.datasources.v1beta.DataSourcesProto
        .internal_static_google_shopping_merchant_datasources_v1beta_UpdateDataSourceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest.class,
            com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest.Builder.class);
  }

  // Presence bits: bit 0 = data_source, bit 1 = update_mask.
  private int bitField0_;

  public static final int DATA_SOURCE_FIELD_NUMBER = 1;
  private com.google.shopping.merchant.datasources.v1beta.DataSource dataSource_;

  /**
   * Required. The data source resource to update.
   *
   * <code>.google.shopping.merchant.datasources.v1beta.DataSource data_source = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the dataSource field is set.
   */
  @java.lang.Override
  public boolean hasDataSource() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * Required. The data source resource to update.
   *
   * @return The dataSource (default instance when unset).
   */
  @java.lang.Override
  public com.google.shopping.merchant.datasources.v1beta.DataSource getDataSource() {
    return dataSource_ == null
        ? com.google.shopping.merchant.datasources.v1beta.DataSource.getDefaultInstance()
        : dataSource_;
  }

  /** Required. The data source resource to update. */
  @java.lang.Override
  public com.google.shopping.merchant.datasources.v1beta.DataSourceOrBuilder
      getDataSourceOrBuilder() {
    return dataSource_ == null
        ? com.google.shopping.merchant.datasources.v1beta.DataSource.getDefaultInstance()
        : dataSource_;
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   * Required. The list of data source fields to be updated.
   *
   * <p>Fields specified in the update mask without a value specified in the body will be deleted
   * from the data source.
   *
   * <p>Providing special "*" value for full data source replacement is not supported.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * Required. The list of data source fields to be updated (see {@link #hasUpdateMask()}).
   *
   * @return The updateMask (default instance when unset).
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /** Required. The list of data source fields to be updated. */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getDataSource());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDataSource());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest other =
        (com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest) obj;
    if (hasDataSource() != other.hasDataSource()) return false;
    if (hasDataSource()) {
      if (!getDataSource().equals(other.getDataSource())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasDataSource()) {
      hash = (37 * hash) + DATA_SOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getDataSource().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard protobuf parse entry points, delegating to PARSER / GeneratedMessageV3 helpers.
  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Request message for the UpdateDataSource method.
   *
   * <p>Protobuf type {@code google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest)
      com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.datasources.v1beta.DataSourcesProto
          .internal_static_google_shopping_merchant_datasources_v1beta_UpdateDataSourceRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.datasources.v1beta.DataSourcesProto
          .internal_static_google_shopping_merchant_datasources_v1beta_UpdateDataSourceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest.class,
              com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest.Builder
                  .class);
    }

    // Construct using
    // com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getDataSourceFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      dataSource_ = null;
      if (dataSourceBuilder_ != null) {
        dataSourceBuilder_.dispose();
        dataSourceBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.datasources.v1beta.DataSourcesProto
          .internal_static_google_shopping_merchant_datasources_v1beta_UpdateDataSourceRequest_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest build() {
      com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest buildPartial() {
      com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest result =
          new com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields (and their presence bits) from the builder into the result message.
    private void buildPartial0(
        com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.dataSource_ = dataSourceBuilder_ == null ? dataSource_ : dataSourceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest) {
        return mergeFrom(
            (com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest other) {
      if (other
          == com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
              .getDefaultInstance()) return this;
      if (other.hasDataSource()) {
        mergeDataSource(other.getDataSource());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getDataSourceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.shopping.merchant.datasources.v1beta.DataSource dataSource_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.datasources.v1beta.DataSource,
            com.google.shopping.merchant.datasources.v1beta.DataSource.Builder,
            com.google.shopping.merchant.datasources.v1beta.DataSourceOrBuilder>
        dataSourceBuilder_;

    /**
     * Required. The data source resource to update.
     *
     * @return Whether the dataSource field is set.
     */
    public boolean hasDataSource() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     * Required. The data source resource to update.
     *
     * @return The dataSource (default instance when unset).
     */
    public com.google.shopping.merchant.datasources.v1beta.DataSource getDataSource() {
      if (dataSourceBuilder_ == null) {
        return dataSource_ == null
            ? com.google.shopping.merchant.datasources.v1beta.DataSource.getDefaultInstance()
            : dataSource_;
      } else {
        return dataSourceBuilder_.getMessage();
      }
    }

    /** Sets the required data_source field; rejects null. */
    public Builder setDataSource(com.google.shopping.merchant.datasources.v1beta.DataSource value) {
      if (dataSourceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        dataSource_ = value;
      } else {
        dataSourceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Sets data_source from a builder. */
    public Builder setDataSource(
        com.google.shopping.merchant.datasources.v1beta.DataSource.Builder builderForValue) {
      if (dataSourceBuilder_ == null) {
        dataSource_ = builderForValue.build();
      } else {
        dataSourceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Merges value into the current data_source (field-wise protobuf merge). */
    public Builder mergeDataSource(
        com.google.shopping.merchant.datasources.v1beta.DataSource value) {
      if (dataSourceBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && dataSource_ != null
            && dataSource_
                != com.google.shopping.merchant.datasources.v1beta.DataSource
                    .getDefaultInstance()) {
          getDataSourceBuilder().mergeFrom(value);
        } else {
          dataSource_ = value;
        }
      } else {
        dataSourceBuilder_.mergeFrom(value);
      }
      if (dataSource_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /** Clears data_source and its presence bit. */
    public Builder clearDataSource() {
      bitField0_ = (bitField0_ & ~0x00000001);
      dataSource_ = null;
      if (dataSourceBuilder_ != null) {
        dataSourceBuilder_.dispose();
        dataSourceBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a builder for data_source, marking the field as set. */
    public com.google.shopping.merchant.datasources.v1beta.DataSource.Builder
        getDataSourceBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getDataSourceFieldBuilder().getBuilder();
    }

    /** Read-only view of data_source (builder view if one exists). */
    public com.google.shopping.merchant.datasources.v1beta.DataSourceOrBuilder
        getDataSourceOrBuilder() {
      if (dataSourceBuilder_ != null) {
        return dataSourceBuilder_.getMessageOrBuilder();
      } else {
        return dataSource_ == null ?
com.google.shopping.merchant.datasources.v1beta.DataSource.getDefaultInstance() : dataSource_; } } /** * * * <pre> * Required. The data source resource to update. * </pre> * * <code> * .google.shopping.merchant.datasources.v1beta.DataSource data_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.datasources.v1beta.DataSource, com.google.shopping.merchant.datasources.v1beta.DataSource.Builder, com.google.shopping.merchant.datasources.v1beta.DataSourceOrBuilder> getDataSourceFieldBuilder() { if (dataSourceBuilder_ == null) { dataSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.datasources.v1beta.DataSource, com.google.shopping.merchant.datasources.v1beta.DataSource.Builder, com.google.shopping.merchant.datasources.v1beta.DataSourceOrBuilder>( getDataSource(), getParentForChildren(), isClean()); dataSource_ = null; } return dataSourceBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. 
* * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of data source fields to be updated. 
* * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. 
The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of data source fields to be updated. * * Fields specified in the update mask without a value specified in the * body will be deleted from the data source. * * Providing special "*" value for full data source replacement is not * supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest) } // 
@@protoc_insertion_point(class_scope:google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest)

  // Singleton default (all-fields-unset) instance; protobuf messages are
  // immutable, so one shared instance is safe and avoids allocation.
  private static final com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest();
  }

  /** Returns the shared immutable default instance of this message type. */
  public static com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser. Parsing is delegated to Builder.mergeFrom so that a
  // partially-read message can still be attached to the thrown exception via
  // setUnfinishedMessage, preserving whatever fields were decoded before the
  // failure.
  private static final com.google.protobuf.Parser<UpdateDataSourceRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateDataSourceRequest>() {
        @java.lang.Override
        public UpdateDataSourceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw with the partially-built message attached for callers
            // that inspect unfinished messages.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O failures are surfaced to protobuf callers as parse failures.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the parser for this message type. */
  public static com.google.protobuf.Parser<UpdateDataSourceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateDataSourceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.shopping.merchant.datasources.v1beta.UpdateDataSourceRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): the three lines below are dataset/extraction metadata — a repo
// id ("googleapis/google-cloud-java"), a size ("37,831"), and a file path
// ("java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/CreateIntentRequest.java")
// — accidentally fused between two unrelated generated sources. They are not
// Java; they are preserved here inside a comment so the text stays parseable.
// This file is a concatenation of two protoc-generated files
// (UpdateDataSourceRequest.java above, CreateIntentRequest.java below) and
// should be split back apart rather than hand-edited.
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3/intent.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3; /** * * * <pre> * The request message for * [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.CreateIntentRequest} */ public final class CreateIntentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.CreateIntentRequest) CreateIntentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateIntentRequest.newBuilder() to construct. 
private CreateIntentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateIntentRequest() { parent_ = ""; languageCode_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateIntentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.IntentProto .internal_static_google_cloud_dialogflow_cx_v3_CreateIntentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.IntentProto .internal_static_google_cloud_dialogflow_cx_v3_CreateIntentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.class, com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INTENT_FIELD_NUMBER = 2; private com.google.cloud.dialogflow.cx.v3.Intent intent_; /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the intent field is set. */ @java.lang.Override public boolean hasIntent() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The intent. */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3.Intent getIntent() { return intent_ == null ? com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance() : intent_; } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3.IntentOrBuilder getIntentOrBuilder() { return intent_ == null ? com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance() : intent_; } public static final int LANGUAGE_CODE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object languageCode_ = ""; /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. 
* [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The languageCode. */ @java.lang.Override public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The bytes for languageCode. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getIntent()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, languageCode_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getIntent()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, languageCode_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.CreateIntentRequest)) { return super.equals(obj); } 
com.google.cloud.dialogflow.cx.v3.CreateIntentRequest other = (com.google.cloud.dialogflow.cx.v3.CreateIntentRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasIntent() != other.hasIntent()) return false; if (hasIntent()) { if (!getIntent().equals(other.getIntent())) return false; } if (!getLanguageCode().equals(other.getLanguageCode())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasIntent()) { hash = (37 * hash) + INTENT_FIELD_NUMBER; hash = (53 * hash) + getIntent().hashCode(); } hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3.CreateIntentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [Intents.CreateIntent][google.cloud.dialogflow.cx.v3.Intents.CreateIntent]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3.CreateIntentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.CreateIntentRequest) com.google.cloud.dialogflow.cx.v3.CreateIntentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3.IntentProto .internal_static_google_cloud_dialogflow_cx_v3_CreateIntentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3.IntentProto .internal_static_google_cloud_dialogflow_cx_v3_CreateIntentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.class, com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getIntentFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; intent_ = null; if (intentBuilder_ != null) { intentBuilder_.dispose(); intentBuilder_ = null; } languageCode_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3.IntentProto .internal_static_google_cloud_dialogflow_cx_v3_CreateIntentRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateIntentRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateIntentRequest build() { com.google.cloud.dialogflow.cx.v3.CreateIntentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateIntentRequest buildPartial() { com.google.cloud.dialogflow.cx.v3.CreateIntentRequest result = new com.google.cloud.dialogflow.cx.v3.CreateIntentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.cx.v3.CreateIntentRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.intent_ = intentBuilder_ == null ? 
intent_ : intentBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.languageCode_ = languageCode_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3.CreateIntentRequest) { return mergeFrom((com.google.cloud.dialogflow.cx.v3.CreateIntentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.CreateIntentRequest other) { if (other == com.google.cloud.dialogflow.cx.v3.CreateIntentRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasIntent()) { mergeIntent(other.getIntent()); } if (!other.getLanguageCode().isEmpty()) { languageCode_ = other.languageCode_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getIntentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { languageCode_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The agent to create an intent for. * Format: `projects/&lt;ProjectID&gt;/locations/&lt;LocationID&gt;/agents/&lt;AgentID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.dialogflow.cx.v3.Intent intent_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.Intent, com.google.cloud.dialogflow.cx.v3.Intent.Builder, com.google.cloud.dialogflow.cx.v3.IntentOrBuilder> intentBuilder_; /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the intent field is set. */ public boolean hasIntent() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The intent. */ public com.google.cloud.dialogflow.cx.v3.Intent getIntent() { if (intentBuilder_ == null) { return intent_ == null ? com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance() : intent_; } else { return intentBuilder_.getMessage(); } } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIntent(com.google.cloud.dialogflow.cx.v3.Intent value) { if (intentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } intent_ = value; } else { intentBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The intent to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIntent(com.google.cloud.dialogflow.cx.v3.Intent.Builder builderForValue) { if (intentBuilder_ == null) { intent_ = builderForValue.build(); } else { intentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeIntent(com.google.cloud.dialogflow.cx.v3.Intent value) { if (intentBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && intent_ != null && intent_ != com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance()) { getIntentBuilder().mergeFrom(value); } else { intent_ = value; } } else { intentBuilder_.mergeFrom(value); } if (intent_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearIntent() { bitField0_ = (bitField0_ & ~0x00000002); intent_ = null; if (intentBuilder_ != null) { intentBuilder_.dispose(); intentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3.Intent.Builder getIntentBuilder() { bitField0_ |= 0x00000002; onChanged(); return getIntentFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The intent to create. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3.IntentOrBuilder getIntentOrBuilder() { if (intentBuilder_ != null) { return intentBuilder_.getMessageOrBuilder(); } else { return intent_ == null ? com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance() : intent_; } } /** * * * <pre> * Required. The intent to create. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3.Intent intent = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.Intent, com.google.cloud.dialogflow.cx.v3.Intent.Builder, com.google.cloud.dialogflow.cx.v3.IntentOrBuilder> getIntentFieldBuilder() { if (intentBuilder_ == null) { intentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3.Intent, com.google.cloud.dialogflow.cx.v3.Intent.Builder, com.google.cloud.dialogflow.cx.v3.IntentOrBuilder>( getIntent(), getParentForChildren(), isClean()); intent_ = null; } return intentBuilder_; } private java.lang.Object languageCode_ = ""; /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The languageCode. 
*/ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return The bytes for languageCode. */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @param value The languageCode to set. * @return This builder for chaining. */ public Builder setLanguageCode(java.lang.String value) { if (value == null) { throw new NullPointerException(); } languageCode_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. 
* [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @return This builder for chaining. */ public Builder clearLanguageCode() { languageCode_ = getDefaultInstance().getLanguageCode(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The language of the following fields in `intent`: * * * `Intent.training_phrases.parts.text` * * If not specified, the agent's default language is used. * [Many * languages](https://cloud.google.com/dialogflow/cx/docs/reference/language) * are supported. * Note: languages must be enabled in the agent before they can be used. * </pre> * * <code>string language_code = 3;</code> * * @param value The bytes for languageCode to set. * @return This builder for chaining. */ public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); languageCode_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.CreateIntentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.CreateIntentRequest) private static final com.google.cloud.dialogflow.cx.v3.CreateIntentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.CreateIntentRequest(); } public static com.google.cloud.dialogflow.cx.v3.CreateIntentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static 
final com.google.protobuf.Parser<CreateIntentRequest> PARSER = new com.google.protobuf.AbstractParser<CreateIntentRequest>() { @java.lang.Override public CreateIntentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateIntentRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateIntentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3.CreateIntentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,858
java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/CreateCatalogRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.biglake.v1alpha1; /** * * * <pre> * Request message for the CreateCatalog method. * </pre> * * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest} */ public final class CreateCatalogRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) CreateCatalogRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateCatalogRequest.newBuilder() to construct. 
private CreateCatalogRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateCatalogRequest() { parent_ = ""; catalogId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateCatalogRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.class, com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource where this catalog will be created. * Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent resource where this catalog will be created. 
* Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CATALOG_FIELD_NUMBER = 2; private com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog_; /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the catalog field is set. */ @java.lang.Override public boolean hasCatalog() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The catalog. */ @java.lang.Override public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalog() { return catalog_ == null ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() : catalog_; } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogOrBuilder() { return catalog_ == null ? 
com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() : catalog_; } public static final int CATALOG_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object catalogId_ = ""; /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. * </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The catalogId. */ @java.lang.Override public java.lang.String getCatalogId() { java.lang.Object ref = catalogId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); catalogId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. * </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for catalogId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getCatalogIdBytes() { java.lang.Object ref = catalogId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); catalogId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getCatalog()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(catalogId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, catalogId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCatalog()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(catalogId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, catalogId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest)) { return super.equals(obj); } 
com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest other = (com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasCatalog() != other.hasCatalog()) return false; if (hasCatalog()) { if (!getCatalog().equals(other.getCatalog())) return false; } if (!getCatalogId().equals(other.getCatalogId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasCatalog()) { hash = (37 * hash) + CATALOG_FIELD_NUMBER; hash = (53 * hash) + getCatalog().hashCode(); } hash = (37 * hash) + CATALOG_ID_FIELD_NUMBER; hash = (53 * hash) + getCatalogId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the CreateCatalog method. * </pre> * * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.class, 
com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.Builder.class); } // Construct using com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCatalogFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; catalog_ = null; if (catalogBuilder_ != null) { catalogBuilder_.dispose(); catalogBuilder_ = null; } catalogId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto .internal_static_google_cloud_bigquery_biglake_v1alpha1_CreateCatalogRequest_descriptor; } @java.lang.Override public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest build() { com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest buildPartial() { com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest result = new com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 
0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.catalog_ = catalogBuilder_ == null ? catalog_ : catalogBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.catalogId_ = catalogId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) { return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest other) { if (other == com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCatalog()) { mergeCatalog(other.getCatalog()); } if (!other.getCatalogId().isEmpty()) { catalogId_ = other.catalogId_; bitField0_ |= 0x00000004; 
onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getCatalogFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { catalogId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource where this catalog will be created. * Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent resource where this catalog will be created. 
* Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource where this catalog will be created. * Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent resource where this catalog will be created. * Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent resource where this catalog will be created. * Format: projects/{project_id_or_number}/locations/{location_id} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.bigquery.biglake.v1alpha1.Catalog catalog_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.biglake.v1alpha1.Catalog, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder> catalogBuilder_; /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the catalog field is set. */ public boolean hasCatalog() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The catalog. */ public com.google.cloud.bigquery.biglake.v1alpha1.Catalog getCatalog() { if (catalogBuilder_ == null) { return catalog_ == null ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() : catalog_; } else { return catalogBuilder_.getMessage(); } } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCatalog(com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { if (catalogBuilder_ == null) { if (value == null) { throw new NullPointerException(); } catalog_ = value; } else { catalogBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. 
The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCatalog( com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder builderForValue) { if (catalogBuilder_ == null) { catalog_ = builderForValue.build(); } else { catalogBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCatalog(com.google.cloud.bigquery.biglake.v1alpha1.Catalog value) { if (catalogBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && catalog_ != null && catalog_ != com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance()) { getCatalogBuilder().mergeFrom(value); } else { catalog_ = value; } } else { catalogBuilder_.mergeFrom(value); } if (catalog_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCatalog() { bitField0_ = (bitField0_ & ~0x00000002); catalog_ = null; if (catalogBuilder_ != null) { catalogBuilder_.dispose(); catalogBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. 
* </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder getCatalogBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCatalogFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder getCatalogOrBuilder() { if (catalogBuilder_ != null) { return catalogBuilder_.getMessageOrBuilder(); } else { return catalog_ == null ? com.google.cloud.bigquery.biglake.v1alpha1.Catalog.getDefaultInstance() : catalog_; } } /** * * * <pre> * Required. The catalog to create. * The `name` field does not need to be provided. * </pre> * * <code> * .google.cloud.bigquery.biglake.v1alpha1.Catalog catalog = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.biglake.v1alpha1.Catalog, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder> getCatalogFieldBuilder() { if (catalogBuilder_ == null) { catalogBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.biglake.v1alpha1.Catalog, com.google.cloud.bigquery.biglake.v1alpha1.Catalog.Builder, com.google.cloud.bigquery.biglake.v1alpha1.CatalogOrBuilder>( getCatalog(), getParentForChildren(), isClean()); catalog_ = null; } return catalogBuilder_; } private java.lang.Object catalogId_ = ""; /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. * </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The catalogId. 
*/ public java.lang.String getCatalogId() { java.lang.Object ref = catalogId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); catalogId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. * </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for catalogId. */ public com.google.protobuf.ByteString getCatalogIdBytes() { java.lang.Object ref = catalogId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); catalogId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. * </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The catalogId to set. * @return This builder for chaining. */ public Builder setCatalogId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } catalogId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. * </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearCatalogId() { catalogId_ = getDefaultInstance().getCatalogId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the catalog, which will become the final * component of the catalog's resource name. 
* </pre> * * <code>string catalog_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for catalogId to set. * @return This builder for chaining. */ public Builder setCatalogIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); catalogId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest) private static final com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest(); } public static com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateCatalogRequest> PARSER = new com.google.protobuf.AbstractParser<CreateCatalogRequest>() { @java.lang.Override public CreateCatalogRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateCatalogRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateCatalogRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.biglake.v1alpha1.CreateCatalogRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ofbiz
38,203
framework/entity/src/main/java/org/apache/ofbiz/entity/Delegator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ofbiz.entity; import java.net.URL; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.parsers.ParserConfigurationException; import org.apache.ofbiz.entity.cache.Cache; import org.apache.ofbiz.entity.condition.EntityCondition; import org.apache.ofbiz.entity.datasource.GenericHelper; import org.apache.ofbiz.entity.datasource.GenericHelperInfo; import org.apache.ofbiz.entity.eca.EntityEcaHandler; import org.apache.ofbiz.entity.model.DynamicViewEntity; import org.apache.ofbiz.entity.model.ModelEntity; import org.apache.ofbiz.entity.model.ModelField; import org.apache.ofbiz.entity.model.ModelFieldType; import org.apache.ofbiz.entity.model.ModelFieldTypeReader; import org.apache.ofbiz.entity.model.ModelGroupReader; import org.apache.ofbiz.entity.model.ModelReader; import org.apache.ofbiz.entity.util.DistributedCacheClear; import org.apache.ofbiz.entity.util.EntityCrypto; import org.apache.ofbiz.entity.util.EntityFindOptions; import org.apache.ofbiz.entity.util.EntityListIterator; import org.apache.ofbiz.entity.util.EntityStoreOptions; import org.apache.ofbiz.entity.util.SequenceUtil; import org.w3c.dom.Document; import 
org.w3c.dom.Element; import org.xml.sax.SAXException; public interface Delegator { enum OperationType {INSERT, UPDATE, DELETE} void clearAllCacheLinesByDummyPK(Collection<GenericPK> dummyPKs); void clearAllCacheLinesByValue(Collection<GenericValue> values); /** * This method is a shortcut to completely clear all entity engine caches. * For performance reasons this should not be called very often. */ void clearAllCaches(); void clearAllCaches(boolean distribute); /** * Remove a CACHED Generic Entity from the cache by its primary key, does * NOT check to see if the passed GenericPK is a complete primary key. Also * tries to clear the corresponding all cache entry. * * @param primaryKey * The primary key to clear by. */ void clearCacheLine(GenericPK primaryKey); void clearCacheLine(GenericPK primaryKey, boolean distribute); /** * Remove a CACHED GenericValue from as many caches as it can. Automatically * tries to remove entries from the all cache, the by primary key cache, and * the by and cache. This is the ONLY method that tries to clear * automatically from the by and cache. * * @param value * The GenericValue to clear by. */ void clearCacheLine(GenericValue value); void clearCacheLine(GenericValue value, boolean distribute); /** * Remove all CACHED Generic Entity (List) from the cache * * @param entityName * The Name of the Entity as defined in the entity XML file */ void clearCacheLine(String entityName); /** * Remove a CACHED Generic Entity (List) from the cache, either a PK, ByAnd, * or All * * @param entityName * The Name of the Entity as defined in the entity XML file * @param fields * The fields of the named entity to query by with their * corresponding values */ void clearCacheLine(String entityName, Map<String, ? 
extends Object> fields); /** * Remove a CACHED Generic Entity (List) from the cache, either a PK, ByAnd, * or All * * @param entityName * The Name of the Entity as defined in the entity XML file * @param fields * The fields of the named entity to query by with their * corresponding values */ void clearCacheLine(String entityName, Object... fields); void clearCacheLineByCondition(String entityName, EntityCondition condition); void clearCacheLineByCondition(String entityName, EntityCondition condition, boolean distribute); /** * Remove a CACHED Generic Entity from the cache by its primary key. Checks * to see if the passed GenericPK is a complete primary key, if it is then * the cache line will be removed from the primaryKeyCache; if it is NOT a * complete primary key it will remove the cache line from the andCache. If * the fields map is empty, then the allCache for the entity will be * cleared. * * @param dummyPK * The dummy primary key to clear by. */ void clearCacheLineFlexible(GenericEntity dummyPK); void clearCacheLineFlexible(GenericEntity dummyPK, boolean distribute); Delegator cloneDelegator(); Delegator cloneDelegator(String delegatorName); /** * Creates a Entity in the form of a GenericValue and write it to the * datasource * * @param primaryKey * The GenericPK to create a value in the datasource from * @return GenericValue instance containing the new instance */ GenericValue create(GenericPK primaryKey) throws GenericEntityException; /** * Creates a Entity in the form of a GenericValue and write it to the * datasource * * @param value * The GenericValue to create a value in the datasource from * @return GenericValue instance containing the new instance */ GenericValue create(GenericValue value) throws GenericEntityException; /** * Creates a Entity in the form of a GenericValue and write it to the * database * * @return GenericValue instance containing the new instance */ GenericValue create(String entityName, Map<String, ? 
extends Object> fields) throws GenericEntityException; /** * Creates a Entity in the form of a GenericValue and write it to the * database * * @return GenericValue instance containing the new instance */ GenericValue create(String entityName, Object... fields) throws GenericEntityException; /** * Creates or stores an Entity * * @param value * The GenericValue instance containing the new or existing * instance * @return GenericValue instance containing the new or updated instance */ GenericValue createOrStore(GenericValue value) throws GenericEntityException; /** * Sets the sequenced ID (for entity with one primary key field ONLY), and * then does a create in the database as normal. The reason to do it this * way is that it will retry and fix the sequence if somehow the sequencer * is in a bad state and returning a value that already exists. * * @param value * The GenericValue to create a value in the datasource from * @return GenericValue instance containing the new instance */ GenericValue createSetNextSeqId(GenericValue value) throws GenericEntityException; /** * Creates a Entity in the form of a GenericValue and write it to the * database * * @return GenericValue instance containing the new instance */ GenericValue createSingle(String entityName, Object singlePkValue) throws GenericEntityException; @Deprecated Object decryptFieldValue(String entityName, String encValue) throws EntityCryptoException; Object decryptFieldValue(String entityName, ModelField.EncryptMethod encryptMethod, String encValue) throws EntityCryptoException; Object encryptFieldValue(String entityName, ModelField.EncryptMethod encryptMethod, Object fieldValue) throws EntityCryptoException; /** * Finds GenericValues by the conditions specified in the EntityCondition * object, the the EntityCondition javadoc for more details. 
* * @param entityName * The name of the Entity as defined in the entity XML file * @param whereEntityCondition * The EntityCondition object that specifies how to constrain * this query before any groupings are done (if this is a view * entity with group-by aliases) * @param havingEntityCondition * The EntityCondition object that specifies how to constrain * this query after any groupings are done (if this is a view * entity with group-by aliases) * @param fieldsToSelect * The fields of the named entity to get from the database; if * empty or null all fields will be retreived * @param orderBy * The fields of the named entity to order the query by; * optionally add a " ASC" for ascending or " DESC" for * descending * @param findOptions * An instance of EntityFindOptions that specifies advanced query * options. See the EntityFindOptions JavaDoc for more details. * @return EntityListIterator representing the result of the query: NOTE * THAT THIS MUST BE CLOSED (preferably in a finally block) WHEN YOU * ARE DONE WITH IT, AND DON'T LEAVE IT OPEN TOO LONG BEACUSE IT * WILL MAINTAIN A DATABASE CONNECTION. 
*/ EntityListIterator find(String entityName, EntityCondition whereEntityCondition, EntityCondition havingEntityCondition, Set<String> fieldsToSelect, List<String> orderBy, EntityFindOptions findOptions) throws GenericEntityException; /** * Finds all Generic entities * * @param entityName * The Name of the Entity as defined in the entity XML file * @param useCache * Whether to cache the results * @return List containing all Generic entities */ List<GenericValue> findAll(String entityName, boolean useCache) throws GenericEntityException; /** * Finds Generic Entity records by all of the specified fields (ie: combined * using AND), looking first in the cache; uses orderBy for lookup, but only * keys results on the entityName and fields * * @param entityName * The Name of the Entity as defined in the entity XML file * @param fields * The fields of the named entity to query by with their * corresponding values * @param orderBy * The fields of the named entity to order the query by; * optionally add a " ASC" for ascending or " DESC" for * descending * @param useCache * Whether to cache the results * @return List of GenericValue instances that match the query */ List<GenericValue> findByAnd(String entityName, Map<String, ? extends Object> fields, List<String> orderBy, boolean useCache) throws GenericEntityException; /** * Find a Generic Entity by its Primary Key and only returns the values * requested by the passed keys (names). * * @param primaryKey * The primary key to find by. * @param keys * The keys, or names, of the values to retrieve; only these * values will be retrieved * @return The GenericValue corresponding to the primaryKey */ GenericValue findByPrimaryKeyPartial(GenericPK primaryKey, Set<String> keys) throws GenericEntityException; /** * Gets the hit count of GenericValues for the given EntityCondition objects. 
* * @param entityName * @param whereEntityCondition * @param havingEntityCondition * @param findOptions * @return long value with hit count * @throws GenericEntityException */ long findCountByCondition(String entityName, EntityCondition whereEntityCondition, EntityCondition havingEntityCondition, EntityFindOptions findOptions) throws GenericEntityException; /** * Finds GenericValues by the conditions specified in the EntityCondition * object, the the EntityCondition javadoc for more details. * * @param entityName * The name of the Entity as defined in the entity XML file * @param entityCondition * The EntityCondition object that specifies how to constrain * this query before any groupings are done (if this is a view * entity with group-by aliases) * @param fieldsToSelect * The fields of the named entity to get from the database; if * empty or null all fields will be retrieved * @param orderBy * The fields of the named entity to order the query by; * optionally add a " ASC" for ascending or " DESC" for * descending * @param findOptions * An instance of EntityFindOptions that specifies advanced query * options. See the EntityFindOptions JavaDoc for more details. * @return List of GenericValue objects representing the result */ List<GenericValue> findList(String entityName, EntityCondition entityCondition, Set<String> fieldsToSelect, List<String> orderBy, EntityFindOptions findOptions, boolean useCache) throws GenericEntityException; /** * Finds GenericValues by the conditions specified in the EntityCondition * object, the the EntityCondition javadoc for more details. 
* * @param dynamicViewEntity * The DynamicViewEntity to use for the entity model for this * query; generally created on the fly for limited use * @param whereEntityCondition * The EntityCondition object that specifies how to constrain * this query before any groupings are done (if this is a view * entity with group-by aliases) * @param havingEntityCondition * The EntityCondition object that specifies how to constrain * this query after any groupings are done (if this is a view * entity with group-by aliases) * @param fieldsToSelect * The fields of the named entity to get from the database; if * empty or null all fields will be retreived * @param orderBy * The fields of the named entity to order the query by; * optionally add a " ASC" for ascending or " DESC" for * descending * @param findOptions * An instance of EntityFindOptions that specifies advanced query * options. See the EntityFindOptions JavaDoc for more details. * @return EntityListIterator representing the result of the query: NOTE * THAT THIS MUST BE CLOSED WHEN YOU ARE DONE WITH IT, AND DON'T * LEAVE IT OPEN TOO LONG BEACUSE IT WILL MAINTAIN A DATABASE * CONNECTION. */ EntityListIterator findListIteratorByCondition(DynamicViewEntity dynamicViewEntity, EntityCondition whereEntityCondition, EntityCondition havingEntityCondition, Collection<String> fieldsToSelect, List<String> orderBy, EntityFindOptions findOptions) throws GenericEntityException; /** * Find a Generic Entity by its primary key. * * @param entityName The Name of the Entity as defined in the entity XML file * @param useCache Retrieve the Generic Entity from the cache when <code>true</code> * @param fields The fields of the named entity to query by with their corresponding values * @return The Generic Entity corresponding to the primary key * @throws GenericEntityException */ GenericValue findOne(String entityName, boolean useCache, Object... 
fields) throws GenericEntityException; /** * Find a Generic Entity by its Primary Key * * @param entityName * The Name of the Entity as defined in the entity XML file * @param fields * The fields of the named entity to query by with their * corresponding values * @return The GenericValue corresponding to the primaryKey */ GenericValue findOne(String entityName, Map<String, ? extends Object> fields, boolean useCache) throws GenericEntityException; Cache getCache(); String getCurrentSessionIdentifier(); String getCurrentUserIdentifier(); String getDelegatorName(); String getDelegatorBaseName(); String getDelegatorTenantId(); <T> EntityEcaHandler<T> getEntityEcaHandler(); /** * Gets a field type instance by name from the helper that corresponds to * the specified entity * * @param entity * The entity * @param type * The name of the type * @return ModelFieldType instance for the named type from the helper that * corresponds to the specified entity */ ModelFieldType getEntityFieldType(ModelEntity entity, String type) throws GenericEntityException; /** * Gets field type names from the helper that corresponds to the specified * entity * * @param entity * The entity * @return Collection of field type names from the helper that corresponds * to the specified entity */ Collection<String> getEntityFieldTypeNames(ModelEntity entity) throws GenericEntityException; /** * Gets the helper name that corresponds to this delegator and the specified * entityName * * @param entityName * The name of the entity to get the helper for * @return String with the helper name that corresponds to this delegator * and the specified entityName */ String getEntityGroupName(String entityName); /** * Gets the an instance of helper that corresponds to this delegator and the * specified entity * * @param entity * The entity to get the helper for * @return GenericHelper that corresponds to this delegator and the * specified entity */ GenericHelper getEntityHelper(ModelEntity entity) throws 
GenericEntityException; /** * Gets the an instance of helper that corresponds to this delegator and the * specified entityName * * @param entityName * The name of the entity to get the helper for * @return GenericHelper that corresponds to this delegator and the * specified entityName */ GenericHelper getEntityHelper(String entityName) throws GenericEntityException; /** * Gets the helper name that corresponds to this delegator and the specified * entity * * @param entity * The entity to get the helper for * @return String with the helper name that corresponds to this delegator * and the specified entity */ String getEntityHelperName(ModelEntity entity); /** * Gets the helper name that corresponds to this delegator and the specified * entityName * * @param entityName * The name of the entity to get the helper name for * @return String with the helper name that corresponds to this delegator * and the specified entityName */ String getEntityHelperName(String entityName); GenericValue getFromPrimaryKeyCache(GenericPK primaryKey); /** * Gets the helper name that corresponds to this delegator and the specified * entityName * * @param groupName * The name of the group to get the helper name for * @return String with the helper name that corresponds to this delegator * and the specified entityName */ String getGroupHelperName(String groupName); GenericHelperInfo getGroupHelperInfo(String entityGroupName); /** * Gets the instance of ModelEntity that corresponds to this delegator and * the specified entityName * * @param entityName * The name of the entity to get * @return ModelEntity that corresponds to this delegator and the specified * entityName */ ModelEntity getModelEntity(String entityName); /** * Gets a Map of entity name and entity model pairs that are in the named * group * * @param groupName The name of the group * @return Map of entityName String keys and ModelEntity instance values */ Map<String, ModelEntity> getModelEntityMapByGroup(String groupName) throws 
GenericEntityException; ModelFieldTypeReader getModelFieldTypeReader(ModelEntity entity); /** * Gets the instance of ModelGroupReader that corresponds to this delegator * * @return ModelGroupReader that corresponds to this delegator */ ModelGroupReader getModelGroupReader(); /** * Gets the instance of ModelReader that corresponds to this delegator * * @return ModelReader that corresponds to this delegator */ ModelReader getModelReader(); /** * Get the named Related Entity for the GenericValue from the persistent * store across another Relation. Helps to get related Values in a * multi-to-multi relationship. * * @param relationNameOne * String containing the relation name which is the combination * of relation.title and relation.rel-entity-name as specified in * the entity XML definition file, for first relation * @param relationNameTwo * String containing the relation name for second relation * @param value * GenericValue instance containing the entity * @param orderBy * The fields of the named entity to order the query by; may be * null; optionally add a " ASC" for ascending or " DESC" for * descending * @return List of GenericValue instances as specified in the relation * definition */ List<GenericValue> getMultiRelation(GenericValue value, String relationNameOne, String relationNameTwo, List<String> orderBy) throws GenericEntityException; /** * Get the next guaranteed unique seq id from the sequence with the given * sequence name; if the named sequence doesn't exist, it will be created * * @param seqName * The name of the sequence to get the next seq id from * @return String with the next sequenced id for the given sequence name */ String getNextSeqId(String seqName); /** * Get the next guaranteed unique seq id from the sequence with the given * sequence name; if the named sequence doesn't exist, it will be created * * @param seqName * The name of the sequence to get the next seq id from * @param staggerMax * The maximum amount to stagger the sequenced ID, if 1 
the * sequence will be incremented by 1, otherwise the current * sequence ID will be incremented by a value between 1 and * staggerMax * @return Long with the next seq id for the given sequence name */ String getNextSeqId(String seqName, long staggerMax); /** * Get the next guaranteed unique seq id from the sequence with the given * sequence name; if the named sequence doesn't exist, it will be created * * @param seqName * The name of the sequence to get the next seq id from * @return Long with the next sequenced id for the given sequence name */ Long getNextSeqIdLong(String seqName); /** * Get the next guaranteed unique seq id from the sequence with the given * sequence name; if the named sequence doesn't exist, it will be created * * @param seqName * The name of the sequence to get the next seq id from * @param staggerMax * The maximum amount to stagger the sequenced ID, if 1 the * sequence will be incremented by 1, otherwise the current * sequence ID will be incremented by a value between 1 and * staggerMax * @return Long with the next seq id for the given sequence name */ Long getNextSeqIdLong(String seqName, long staggerMax); /** * Gets the name of the server configuration that corresponds to this * delegator * * @return server configuration name */ String getOriginalDelegatorName(); /** * Get the named Related Entity for the GenericValue from the persistent * store * * @param relationName * String containing the relation name which is the combination * of relation.title and relation.rel-entity-name as specified in * the entity XML definition file * @param byAndFields * the fields that must equal in order to keep; may be null * @param orderBy * The fields of the named entity to order the query by; may be * null; optionally add a " ASC" for ascending or " DESC" for * descending * @param value * GenericValue instance containing the entity * @param useCache * Whether to cache the results * @return List of GenericValue instances as specified in the relation * 
definition */ List<GenericValue> getRelated(String relationName, Map<String, ? extends Object> byAndFields, List<String> orderBy, GenericValue value, boolean useCache) throws GenericEntityException; /** * Get a dummy primary key for the named Related Entity for the GenericValue. * * @param relationName * String containing the relation name which is the combination * of relation.title and relation.rel-entity-name as specified in * the entity XML definition file * @param byAndFields * the fields that must equal in order to keep; may be null * @param value * GenericValue instance containing the entity * @return GenericPK containing a possibly incomplete PrimaryKey object * representing the related entity or entities */ GenericPK getRelatedDummyPK(String relationName, Map<String, ? extends Object> byAndFields, GenericValue value) throws GenericEntityException; /** * Get related entity where relation is of type one, uses findByPrimaryKey * * @param relationName * String containing the relation name which is the combination * of relation.title and relation.rel-entity-name as specified in * the entity XML definition file * @param value * GenericValue instance containing the entity * @param useCache * Whether to cache the results * @return GenericValue that is the related entity * @throws IllegalArgumentException * if the list found has more than one item */ GenericValue getRelatedOne(String relationName, GenericValue value, boolean useCache) throws GenericEntityException; void initEntityEcaHandler(); void initDistributedCacheClear(); GenericPK makePK(Element element); /** Creates a Primary Key in the form of a GenericPK without persisting it */ GenericPK makePK(String entityName); /** Creates a Primary Key in the form of a GenericPK without persisting it */ GenericPK makePK(String entityName, Map<String, ? extends Object> fields); /** Creates a Primary Key in the form of a GenericPK without persisting it */ GenericPK makePK(String entityName, Object... 
fields); /** Creates a Primary Key in the form of a GenericPK without persisting it */ GenericPK makePKSingle(String entityName, Object singlePkValue); Delegator makeTestDelegator(String delegatorName); /** * Creates a Entity in the form of a GenericValue without persisting it; * only valid fields will be pulled from the fields Map */ GenericValue makeValidValue(String entityName, Map<String, ? extends Object> fields); /** * Creates a Entity in the form of a GenericValue without persisting it; * only valid fields will be pulled from the fields Map */ GenericValue makeValidValue(String entityName, Object... fields); GenericValue makeValue(Element element); /** Creates a Entity in the form of a GenericValue without persisting it */ GenericValue makeValue(String entityName); /** Creates a Entity in the form of a GenericValue without persisting it */ GenericValue makeValue(String entityName, Map<String, ? extends Object> fields); /** Creates a Entity in the form of a GenericValue without persisting it */ GenericValue makeValue(String entityName, Object... 
fields); List<GenericValue> makeValues(Document document); /** Creates a Entity in the form of a GenericValue without persisting it */ GenericValue makeValueSingle(String entityName, Object singlePkValue); void putAllInPrimaryKeyCache(List<GenericValue> values); void putInPrimaryKeyCache(GenericPK primaryKey, GenericValue value); // ======= XML Related Methods ======== List<GenericValue> readXmlDocument(URL url) throws SAXException, ParserConfigurationException, java.io.IOException; /** * Refresh the Entity for the GenericValue from the persistent store * * @param value * GenericValue instance containing the entity to refresh */ void refresh(GenericValue value) throws GenericEntityException; /** * Refresh the Entity for the GenericValue from the cache * * @param value * GenericValue instance containing the entity to refresh */ void refreshFromCache(GenericValue value) throws GenericEntityException; /** Refreshes the ID sequencer clearing all cached bank values. */ void refreshSequencer(); /** * <p>Remove the Entities from the List from the persistent store.</p> * <p>The List contains GenericEntity objects, can be either GenericPK or * GenericValue. </p> * <p>If a certain entity contains a complete primary key, the entity in * the datasource corresponding to that primary key will be removed, this * is like a removeByPrimary Key.</p> * <p>On the other hand, if a certain entity is an incomplete or non * primary key, if will behave like the removeByAnd method. </p> * <p>These updates all happen in one transaction, so they will either * all succeed or all fail, if the data source supports transactions.</p> * * @param dummyPKs * Collection of GenericEntity instances containing the entities * or by and fields to remove * @return int representing number of rows effected by this operation */ int removeAll(List<? 
extends GenericEntity> dummyPKs) throws GenericEntityException; int removeAll(String entityName) throws GenericEntityException; /** * Removes/deletes Generic Entity records found by all of the specified * fields (ie: combined using AND) * * @param entityName * The Name of the Entity as defined in the entity XML file * @param fields * The fields of the named entity to query by with their * corresponding values * @return int representing number of rows effected by this operation */ int removeByAnd(String entityName, Map<String, ? extends Object> fields) throws GenericEntityException; /** * Removes/deletes Generic Entity records found by all of the specified * fields (ie: combined using AND) * * @param entityName * The Name of the Entity as defined in the entity XML file * @param fields * The fields of the named entity to query by with their * corresponding values * @return int representing number of rows effected by this operation */ int removeByAnd(String entityName, Object... fields) throws GenericEntityException; /** * Removes/deletes Generic Entity records found by the condition * * @param entityName * The Name of the Entity as defined in the entity XML file * @param condition * The condition used to restrict the removing * @return int representing number of rows effected by this operation */ int removeByCondition(String entityName, EntityCondition condition) throws GenericEntityException; /** * Remove a Generic Entity corresponding to the primaryKey * * @param primaryKey * The primary key of the entity to remove. 
* @return int representing number of rows effected by this operation */ int removeByPrimaryKey(GenericPK primaryKey) throws GenericEntityException; /** * Remove the named Related Entity for the GenericValue from the persistent * store * * @param relationName * String containing the relation name which is the combination * of relation.title and relation.rel-entity-name as specified in * the entity XML definition file * @param value * GenericValue instance containing the entity * @return int representing number of rows effected by this operation */ int removeRelated(String relationName, GenericValue value) throws GenericEntityException; /** * Remove a Generic Value from the database * * @param value * The GenericValue object of the entity to remove. * @return int representing number of rows effected by this operation */ int removeValue(GenericValue value) throws GenericEntityException; void rollback(); void setDistributedCacheClear(DistributedCacheClear distributedCacheClear); void setEntityCrypto(EntityCrypto crypto); <T> void setEntityEcaHandler(EntityEcaHandler<T> entityEcaHandler); /** * Look at existing values for a sub-entity with a sequenced secondary ID, * and get the highest plus 1 */ void setNextSubSeqId(GenericValue value, String seqFieldName, int numericPadding, int incrementBy); /** * Allows you to pass a SequenceUtil class (possibly one that overrides the * getNextSeqId method); if null is passed will effectively refresh the * sequencer. 
*/ void setSequencer(SequenceUtil sequencer); /** * Store the Entity from the GenericValue to the persistent store * * @param value * GenericValue instance containing the entity * @return int representing number of rows effected by this operation */ int store(GenericValue value) throws GenericEntityException; /** * <p>Store the Entities from the List GenericValue instances to the persistent * store.</p> * <p>This is different than the normal store method in that the * store method only does an update, while the storeAll method checks to see * if each entity exists, then either does an insert or an update as * appropriate.</p> * <p>These updates all happen in one transaction, so they * will either all succeed or all fail, if the data source supports * transactions. This is just like to othersToStore feature of the * GenericEntity on a create or store.</p> * * @param values * List of GenericValue instances containing the entities to * store * @return int representing number of rows effected by this operation */ int storeAll(List<GenericValue> values) throws GenericEntityException; /** * <p>Store the Entities from the List GenericValue instances to the persistent * store.</p> * <p>This is different than the normal store method in that the * store method only does an update, while the storeAll method checks to see * if each entity exists, then either does an insert or an update as * appropriate.</p> * <p>These updates all happen in one transaction, so they * will either all succeed or all fail, if the data source supports * transactions. This is just like to othersToStore feature of the * GenericEntity on a create or store.</p> * * @param storeOptions * An instance of EntityStoreOptions that specifies advanced store * options or null for default values. * See the EntityStoreOptions JavaDoc for more details. 
* @param values * List of GenericValue instances containing the entities to * store * @return int representing number of rows effected by this operation */ int storeAll(List<GenericValue> values, EntityStoreOptions storeOptions) throws GenericEntityException; /** * Store a group of values. * * @param entityName * The name of the Entity as defined in the entity XML file * @param fieldsToSet * The fields of the named entity to set in the database * @param condition * The condition that restricts the list of stored values * @return int representing number of rows effected by this operation * @throws GenericEntityException */ int storeByCondition(String entityName, Map<String, ? extends Object> fieldsToSet, EntityCondition condition) throws GenericEntityException; /** * Get use of Distributed Cache Clear mechanism status * @return boolean true if this delegator uses a Distributed Cache Clear mechanism */ boolean useDistributedCacheClear(); }
apache/hadoop-common
37,856
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.jobhistory; import java.io.IOException; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskType; import 
org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobState; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal; import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; /** * The job history events get routed to this class. This class writes the Job * history events to the DFS directly into a staging dir and then moved to a * done-dir. JobHistory implementation is in this package to access package * private classes. */ public class JobHistoryEventHandler extends AbstractService implements EventHandler<JobHistoryEvent> { private final AppContext context; private final int startCount; private int eventCounter; // Those file systems may differ from the job configuration // See org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils // #ensurePathInDefaultFileSystem private FileSystem stagingDirFS; // log Dir FileSystem private FileSystem doneDirFS; // done Dir FileSystem private Path stagingDirPath = null; private Path doneDirPrefixPath = null; // folder for completed jobs private int maxUnflushedCompletionEvents; private int postJobCompletionMultiplier; private long flushTimeout; private int minQueueSizeForBatchingFlushes; // TODO: Rename private int numUnflushedCompletionEvents = 0; private boolean isTimerActive; protected BlockingQueue<JobHistoryEvent> eventQueue = new LinkedBlockingQueue<JobHistoryEvent>(); protected Thread eventHandlingThread; private volatile boolean stopped; private final 
Object lock = new Object(); private static final Log LOG = LogFactory.getLog( JobHistoryEventHandler.class); protected static final Map<JobId, MetaInfo> fileMap = Collections.<JobId,MetaInfo>synchronizedMap(new HashMap<JobId,MetaInfo>()); // should job completion be force when the AM shuts down? protected volatile boolean forceJobCompletion = false; public JobHistoryEventHandler(AppContext context, int startCount) { super("JobHistoryEventHandler"); this.context = context; this.startCount = startCount; } /* (non-Javadoc) * @see org.apache.hadoop.yarn.service.AbstractService#init(org. * apache.hadoop.conf.Configuration) * Initializes the FileSystem and Path objects for the log and done directories. * Creates these directories if they do not already exist. */ @Override protected void serviceInit(Configuration conf) throws Exception { String jobId = TypeConverter.fromYarn(context.getApplicationID()).toString(); String stagingDirStr = null; String doneDirStr = null; String userDoneDirStr = null; try { stagingDirStr = JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobId); doneDirStr = JobHistoryUtils.getConfiguredHistoryIntermediateDoneDirPrefix(conf); userDoneDirStr = JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf); } catch (IOException e) { LOG.error("Failed while getting the configured log directories", e); throw new YarnRuntimeException(e); } //Check for the existence of the history staging dir. Maybe create it. try { stagingDirPath = FileContext.getFileContext(conf).makeQualified(new Path(stagingDirStr)); stagingDirFS = FileSystem.get(stagingDirPath.toUri(), conf); mkdir(stagingDirFS, stagingDirPath, new FsPermission( JobHistoryUtils.HISTORY_STAGING_DIR_PERMISSIONS)); } catch (IOException e) { LOG.error("Failed while checking for/creating history staging path: [" + stagingDirPath + "]", e); throw new YarnRuntimeException(e); } //Check for the existence of intermediate done dir. 
Path doneDirPath = null; try { doneDirPath = FileContext.getFileContext(conf).makeQualified(new Path(doneDirStr)); doneDirFS = FileSystem.get(doneDirPath.toUri(), conf); // This directory will be in a common location, or this may be a cluster // meant for a single user. Creating based on the conf. Should ideally be // created by the JobHistoryServer or as part of deployment. if (!doneDirFS.exists(doneDirPath)) { if (JobHistoryUtils.shouldCreateNonUserDirectory(conf)) { LOG.info("Creating intermediate history logDir: [" + doneDirPath + "] + based on conf. Should ideally be created by the JobHistoryServer: " + MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR); mkdir( doneDirFS, doneDirPath, new FsPermission( JobHistoryUtils.HISTORY_INTERMEDIATE_DONE_DIR_PERMISSIONS .toShort())); // TODO Temporary toShort till new FsPermission(FsPermissions) // respects // sticky } else { String message = "Not creating intermediate history logDir: [" + doneDirPath + "] based on conf: " + MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR + ". Either set to true or pre-create this directory with" + " appropriate permissions"; LOG.error(message); throw new YarnRuntimeException(message); } } } catch (IOException e) { LOG.error("Failed checking for the existance of history intermediate " + "done directory: [" + doneDirPath + "]"); throw new YarnRuntimeException(e); } //Check/create user directory under intermediate done dir. try { doneDirPrefixPath = FileContext.getFileContext(conf).makeQualified(new Path(userDoneDirStr)); mkdir(doneDirFS, doneDirPrefixPath, new FsPermission( JobHistoryUtils.HISTORY_INTERMEDIATE_USER_DIR_PERMISSIONS)); } catch (IOException e) { LOG.error("Error creating user intermediate history done directory: [ " + doneDirPrefixPath + "]", e); throw new YarnRuntimeException(e); } // Maximum number of unflushed completion-events that can stay in the queue // before flush kicks in. 
maxUnflushedCompletionEvents = conf.getInt(MRJobConfig.MR_AM_HISTORY_MAX_UNFLUSHED_COMPLETE_EVENTS, MRJobConfig.DEFAULT_MR_AM_HISTORY_MAX_UNFLUSHED_COMPLETE_EVENTS); // We want to cut down flushes after job completes so as to write quicker, // so we increase maxUnflushedEvents post Job completion by using the // following multiplier. postJobCompletionMultiplier = conf.getInt( MRJobConfig.MR_AM_HISTORY_JOB_COMPLETE_UNFLUSHED_MULTIPLIER, MRJobConfig.DEFAULT_MR_AM_HISTORY_JOB_COMPLETE_UNFLUSHED_MULTIPLIER); // Max time until which flush doesn't take place. flushTimeout = conf.getLong(MRJobConfig.MR_AM_HISTORY_COMPLETE_EVENT_FLUSH_TIMEOUT_MS, MRJobConfig.DEFAULT_MR_AM_HISTORY_COMPLETE_EVENT_FLUSH_TIMEOUT_MS); minQueueSizeForBatchingFlushes = conf.getInt( MRJobConfig.MR_AM_HISTORY_USE_BATCHED_FLUSH_QUEUE_SIZE_THRESHOLD, MRJobConfig.DEFAULT_MR_AM_HISTORY_USE_BATCHED_FLUSH_QUEUE_SIZE_THRESHOLD); super.serviceInit(conf); } private void mkdir(FileSystem fs, Path path, FsPermission fsp) throws IOException { if (!fs.exists(path)) { try { fs.mkdirs(path, fsp); FileStatus fsStatus = fs.getFileStatus(path); LOG.info("Perms after creating " + fsStatus.getPermission().toShort() + ", Expected: " + fsp.toShort()); if (fsStatus.getPermission().toShort() != fsp.toShort()) { LOG.info("Explicitly setting permissions to : " + fsp.toShort() + ", " + fsp); fs.setPermission(path, fsp); } } catch (FileAlreadyExistsException e) { LOG.info("Directory: [" + path + "] already exists."); } } } @Override protected void serviceStart() throws Exception { eventHandlingThread = new Thread(new Runnable() { @Override public void run() { JobHistoryEvent event = null; while (!stopped && !Thread.currentThread().isInterrupted()) { // Log the size of the history-event-queue every so often. 
if (eventCounter != 0 && eventCounter % 1000 == 0) { eventCounter = 0; LOG.info("Size of the JobHistory event queue is " + eventQueue.size()); } else { eventCounter++; } try { event = eventQueue.take(); } catch (InterruptedException e) { LOG.info("EventQueue take interrupted. Returning"); return; } // If an event has been removed from the queue. Handle it. // The rest of the queue is handled via stop() // Clear the interrupt status if it's set before calling handleEvent // and set it if it was set before calling handleEvent. // Interrupts received from other threads during handleEvent cannot be // dealth with - Shell.runCommand() ignores them. synchronized (lock) { boolean isInterrupted = Thread.interrupted(); handleEvent(event); if (isInterrupted) { LOG.debug("Event handling interrupted"); Thread.currentThread().interrupt(); } } } } }, "eventHandlingThread"); eventHandlingThread.start(); super.serviceStart(); } @Override protected void serviceStop() throws Exception { LOG.info("Stopping JobHistoryEventHandler. " + "Size of the outstanding queue size is " + eventQueue.size()); stopped = true; //do not interrupt while event handling is in progress synchronized(lock) { if (eventHandlingThread != null) { LOG.debug("Interrupting Event Handling thread"); eventHandlingThread.interrupt(); } else { LOG.debug("Null event handling thread"); } } try { if (eventHandlingThread != null) { LOG.debug("Waiting for Event Handling thread to complete"); eventHandlingThread.join(); } } catch (InterruptedException ie) { LOG.info("Interrupted Exception while stopping", ie); } // Cancel all timers - so that they aren't invoked during or after // the metaInfo object is wrapped up. for (MetaInfo mi : fileMap.values()) { try { if (LOG.isDebugEnabled()) { LOG.debug("Shutting down timer for " + mi); } mi.shutDownTimer(); } catch (IOException e) { LOG.info("Exception while cancelling delayed flush timer. 
" + "Likely caused by a failed flush " + e.getMessage()); } } //write all the events remaining in queue Iterator<JobHistoryEvent> it = eventQueue.iterator(); while(it.hasNext()) { JobHistoryEvent ev = it.next(); LOG.info("In stop, writing event " + ev.getType()); handleEvent(ev); } // Process JobUnsuccessfulCompletionEvent for jobIds which still haven't // closed their event writers Iterator<JobId> jobIt = fileMap.keySet().iterator(); if(forceJobCompletion) { while (jobIt.hasNext()) { JobId toClose = jobIt.next(); MetaInfo mi = fileMap.get(toClose); if(mi != null && mi.isWriterActive()) { LOG.warn("Found jobId " + toClose + " to have not been closed. Will close"); //Create a JobFinishEvent so that it is written to the job history final Job job = context.getJob(toClose); JobUnsuccessfulCompletionEvent jucEvent = new JobUnsuccessfulCompletionEvent(TypeConverter.fromYarn(toClose), System.currentTimeMillis(), job.getCompletedMaps(), job.getCompletedReduces(), createJobStateForJobUnsuccessfulCompletionEvent( mi.getForcedJobStateOnShutDown()), job.getDiagnostics()); JobHistoryEvent jfEvent = new JobHistoryEvent(toClose, jucEvent); //Bypass the queue mechanism which might wait. Call the method directly handleEvent(jfEvent); } } } //close all file handles for (MetaInfo mi : fileMap.values()) { try { mi.closeWriter(); } catch (IOException e) { LOG.info("Exception while closing file " + e.getMessage()); } } LOG.info("Stopped JobHistoryEventHandler. super.stop()"); super.serviceStop(); } protected EventWriter createEventWriter(Path historyFilePath) throws IOException { FSDataOutputStream out = stagingDirFS.create(historyFilePath, true); return new EventWriter(out); } /** * Create an event writer for the Job represented by the jobID. * Writes out the job configuration to the log directory. * This should be the first call to history for a job * * @param jobId the jobId. 
* @param forcedJobStateOnShutDown * @throws IOException */ protected void setupEventWriter(JobId jobId, String forcedJobStateOnShutDown) throws IOException { if (stagingDirPath == null) { LOG.error("Log Directory is null, returning"); throw new IOException("Missing Log Directory for History"); } MetaInfo oldFi = fileMap.get(jobId); Configuration conf = getConfig(); // TODO Ideally this should be written out to the job dir // (.staging/jobid/files - RecoveryService will need to be patched) Path historyFile = JobHistoryUtils.getStagingJobHistoryFile( stagingDirPath, jobId, startCount); String user = UserGroupInformation.getCurrentUser().getShortUserName(); if (user == null) { throw new IOException( "User is null while setting up jobhistory eventwriter"); } String jobName = context.getJob(jobId).getName(); EventWriter writer = (oldFi == null) ? null : oldFi.writer; Path logDirConfPath = JobHistoryUtils.getStagingConfFile(stagingDirPath, jobId, startCount); if (writer == null) { try { writer = createEventWriter(historyFile); LOG.info("Event Writer setup for JobId: " + jobId + ", File: " + historyFile); } catch (IOException ioe) { LOG.info("Could not create log file: [" + historyFile + "] + for job " + "[" + jobName + "]"); throw ioe; } //Write out conf only if the writer isn't already setup. 
if (conf != null) { // TODO Ideally this should be written out to the job dir // (.staging/jobid/files - RecoveryService will need to be patched) FSDataOutputStream jobFileOut = null; try { if (logDirConfPath != null) { jobFileOut = stagingDirFS.create(logDirConfPath, true); conf.writeXml(jobFileOut); jobFileOut.close(); } } catch (IOException e) { LOG.info("Failed to write the job configuration file", e); throw e; } } } String queueName = JobConf.DEFAULT_QUEUE_NAME; if (conf != null) { queueName = conf.get(MRJobConfig.QUEUE_NAME, JobConf.DEFAULT_QUEUE_NAME); } MetaInfo fi = new MetaInfo(historyFile, logDirConfPath, writer, user, jobName, jobId, forcedJobStateOnShutDown, queueName); fi.getJobSummary().setJobId(jobId); fileMap.put(jobId, fi); } /** Close the event writer for this id * @throws IOException */ public void closeWriter(JobId id) throws IOException { try { final MetaInfo mi = fileMap.get(id); if (mi != null) { mi.closeWriter(); } } catch (IOException e) { LOG.error("Error closing writer for JobID: " + id); throw e; } } @Override public void handle(JobHistoryEvent event) { try { if (isJobCompletionEvent(event.getHistoryEvent())) { // When the job is complete, flush slower but write faster. 
// ---------------------------------------------------------------------------
// NOTE(review): this excerpt is the interior/tail of a larger class (by all
// appearances Hadoop MapReduce's JobHistoryEventHandler — TODO confirm). The
// class header and the fields referenced below (lock, eventQueue, fileMap,
// LOG, context, doneDirFS, stagingDirFS, doneDirPrefixPath, isTimerActive,
// flushTimeout, minQueueSizeForBatchingFlushes, numUnflushedCompletionEvents,
// maxUnflushedCompletionEvents, forceJobCompletion) are declared above it.
// ---------------------------------------------------------------------------

  // Tail of a method that begins above this excerpt: after observing a
  // job-completion event, the unflushed-event threshold is scaled up, then
  // the event is enqueued for asynchronous handling.
        maxUnflushedCompletionEvents =
            maxUnflushedCompletionEvents * postJobCompletionMultiplier;
      }
      eventQueue.put(event);
    } catch (InterruptedException e) {
      // NOTE(review): the thread's interrupt status is not restored before
      // rethrowing; consider Thread.currentThread().interrupt() — TODO confirm.
      throw new YarnRuntimeException(e);
    }
  }

  /**
   * Returns true iff the event is one of the three terminal job events
   * (finished / failed / killed).
   */
  private boolean isJobCompletionEvent(HistoryEvent historyEvent) {
    if (EnumSet.of(EventType.JOB_FINISHED, EventType.JOB_FAILED,
        EventType.JOB_KILLED).contains(historyEvent.getEventType())) {
      return true;
    }
    return false;
  }

  /**
   * Synchronously processes one history event under {@code lock}: lazily sets
   * up the per-job writer on AM_STARTED, writes the event out, folds it into
   * the job summary, updates the job-index info for the relevant event types,
   * and on terminal events closes the writer and (usually) moves the history
   * artifacts to the "done" location.
   */
  @Private
  public void handleEvent(JobHistoryEvent event) {
    synchronized (lock) {

      // If this is JobSubmitted Event, setup the writer
      if (event.getHistoryEvent().getEventType() == EventType.AM_STARTED) {
        try {
          AMStartedEvent amStartedEvent =
              (AMStartedEvent) event.getHistoryEvent();
          setupEventWriter(event.getJobID(),
              amStartedEvent.getForcedJobStateOnShutDown());
        } catch (IOException ioe) {
          LOG.error("Error JobHistoryEventHandler in handleEvent: " + event,
              ioe);
          throw new YarnRuntimeException(ioe);
        }
      }

      // For all events
      // (1) Write it out
      // (2) Process it for JobSummary
      MetaInfo mi = fileMap.get(event.getJobID());
      try {
        HistoryEvent historyEvent = event.getHistoryEvent();
        // NormalizedResourceEvent only feeds the summary; it is never written
        // to the history file.
        if (! (historyEvent instanceof NormalizedResourceEvent)) {
          mi.writeEvent(historyEvent);
        }
        processEventForJobSummary(event.getHistoryEvent(), mi.getJobSummary(),
            event.getJobID());
        if (LOG.isDebugEnabled()) {
          LOG.debug("In HistoryEventHandler "
              + event.getHistoryEvent().getEventType());
        }
      } catch (IOException e) {
        LOG.error("Error writing History Event: " + event.getHistoryEvent(),
            e);
        throw new YarnRuntimeException(e);
      }

      if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) {
        JobSubmittedEvent jobSubmittedEvent =
            (JobSubmittedEvent) event.getHistoryEvent();
        mi.getJobIndexInfo().setSubmitTime(jobSubmittedEvent.getSubmitTime());
        mi.getJobIndexInfo().setQueueName(jobSubmittedEvent.getJobQueueName());
      }

      //initialize the launchTime in the JobIndexInfo of MetaInfo
      if(event.getHistoryEvent().getEventType() == EventType.JOB_INITED ){
        JobInitedEvent jie = (JobInitedEvent) event.getHistoryEvent();
        mi.getJobIndexInfo().setJobStartTime(jie.getLaunchTime());
      }

      // A queue move after submission overrides the queue recorded at submit.
      if (event.getHistoryEvent().getEventType()
          == EventType.JOB_QUEUE_CHANGED) {
        JobQueueChangeEvent jQueueEvent =
            (JobQueueChangeEvent) event.getHistoryEvent();
        mi.getJobIndexInfo().setQueueName(jQueueEvent.getJobQueueName());
      }

      // If this is JobFinishedEvent, close the writer and setup the job-index
      if (event.getHistoryEvent().getEventType() == EventType.JOB_FINISHED) {
        try {
          JobFinishedEvent jFinishedEvent =
              (JobFinishedEvent) event.getHistoryEvent();
          mi.getJobIndexInfo().setFinishTime(jFinishedEvent.getFinishTime());
          mi.getJobIndexInfo().setNumMaps(jFinishedEvent.getFinishedMaps());
          mi.getJobIndexInfo().setNumReduces(
              jFinishedEvent.getFinishedReduces());
          mi.getJobIndexInfo().setJobStatus(JobState.SUCCEEDED.toString());
          closeEventWriter(event.getJobID());
          processDoneFiles(event.getJobID());
        } catch (IOException e) {
          throw new YarnRuntimeException(e);
        }
      }

      // In case of JOB_ERROR, only process all the Done files(e.g. job
      // summary, job history file etc.) if it is last AM retry.
      if (event.getHistoryEvent().getEventType() == EventType.JOB_ERROR) {
        try {
          JobUnsuccessfulCompletionEvent jucEvent =
              (JobUnsuccessfulCompletionEvent) event.getHistoryEvent();
          mi.getJobIndexInfo().setFinishTime(jucEvent.getFinishTime());
          mi.getJobIndexInfo().setNumMaps(jucEvent.getFinishedMaps());
          mi.getJobIndexInfo().setNumReduces(jucEvent.getFinishedReduces());
          mi.getJobIndexInfo().setJobStatus(jucEvent.getStatus());
          closeEventWriter(event.getJobID());
          if(context.isLastAMRetry())
            processDoneFiles(event.getJobID());
        } catch (IOException e) {
          throw new YarnRuntimeException(e);
        }
      }

      // JOB_FAILED / JOB_KILLED always finalize the done files, unlike
      // JOB_ERROR above.
      if (event.getHistoryEvent().getEventType() == EventType.JOB_FAILED
          || event.getHistoryEvent().getEventType()
              == EventType.JOB_KILLED) {
        try {
          JobUnsuccessfulCompletionEvent jucEvent =
              (JobUnsuccessfulCompletionEvent) event
              .getHistoryEvent();
          mi.getJobIndexInfo().setFinishTime(jucEvent.getFinishTime());
          mi.getJobIndexInfo().setNumMaps(jucEvent.getFinishedMaps());
          mi.getJobIndexInfo().setNumReduces(jucEvent.getFinishedReduces());
          mi.getJobIndexInfo().setJobStatus(jucEvent.getStatus());
          closeEventWriter(event.getJobID());
          processDoneFiles(event.getJobID());
        } catch (IOException e) {
          throw new YarnRuntimeException(e);
        }
      }
    }
  }

  /**
   * Folds a single history event into the per-job {@link JobSummary}
   * (user/queue/name, launch and finish times, task counts, slot seconds).
   */
  public void processEventForJobSummary(HistoryEvent event,
      JobSummary summary, JobId jobId) {
    // context.getJob could be used for some of this info as well.
    switch (event.getEventType()) {
    case JOB_SUBMITTED:
      JobSubmittedEvent jse = (JobSubmittedEvent) event;
      summary.setUser(jse.getUserName());
      summary.setQueue(jse.getJobQueueName());
      summary.setJobSubmitTime(jse.getSubmitTime());
      summary.setJobName(jse.getJobName());
      break;
    case NORMALIZED_RESOURCE:
      NormalizedResourceEvent normalizedResourceEvent =
          (NormalizedResourceEvent) event;
      if (normalizedResourceEvent.getTaskType() == TaskType.MAP) {
        summary.setResourcesPerMap(normalizedResourceEvent.getMemory());
      } else if (normalizedResourceEvent.getTaskType() == TaskType.REDUCE) {
        summary.setResourcesPerReduce(normalizedResourceEvent.getMemory());
      }
      break;
    case JOB_INITED:
      JobInitedEvent jie = (JobInitedEvent) event;
      summary.setJobLaunchTime(jie.getLaunchTime());
      break;
    case MAP_ATTEMPT_STARTED:
      // Only the very first map attempt's start time is recorded (0 == unset).
      TaskAttemptStartedEvent mtase = (TaskAttemptStartedEvent) event;
      if (summary.getFirstMapTaskLaunchTime() == 0)
        summary.setFirstMapTaskLaunchTime(mtase.getStartTime());
      break;
    case REDUCE_ATTEMPT_STARTED:
      TaskAttemptStartedEvent rtase = (TaskAttemptStartedEvent) event;
      if (summary.getFirstReduceTaskLaunchTime() == 0)
        summary.setFirstReduceTaskLaunchTime(rtase.getStartTime());
      break;
    case JOB_FINISHED:
      JobFinishedEvent jfe = (JobFinishedEvent) event;
      summary.setJobFinishTime(jfe.getFinishTime());
      summary.setNumFinishedMaps(jfe.getFinishedMaps());
      summary.setNumFailedMaps(jfe.getFailedMaps());
      summary.setNumFinishedReduces(jfe.getFinishedReduces());
      summary.setNumFailedReduces(jfe.getFailedReduces());
      if (summary.getJobStatus() == null)
        summary
            .setJobStatus(org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED
                .toString());
      // TODO JOB_FINISHED does not have state. Effectively job history does not
      // have state about the finished job.
      setSummarySlotSeconds(summary, jfe.getTotalCounters());
      break;
    case JOB_FAILED:
    case JOB_KILLED:
      // Unsuccessful completion events carry no counters/task counts, so
      // those are pulled from the live job via the context instead.
      JobUnsuccessfulCompletionEvent juce =
          (JobUnsuccessfulCompletionEvent) event;
      summary.setJobStatus(juce.getStatus());
      summary.setNumFinishedMaps(context.getJob(jobId).getTotalMaps());
      summary.setNumFinishedReduces(context.getJob(jobId).getTotalReduces());
      summary.setJobFinishTime(juce.getFinishTime());
      setSummarySlotSeconds(summary, context.getJob(jobId).getAllCounters());
      break;
    default:
      break;
    }
  }

  /**
   * Converts the SLOTS_MILLIS_* counters (milliseconds) to seconds and
   * stores them on the summary; silently skips absent counters.
   */
  private void setSummarySlotSeconds(JobSummary summary, Counters allCounters) {

    Counter slotMillisMapCounter = allCounters
      .findCounter(JobCounter.SLOTS_MILLIS_MAPS);
    if (slotMillisMapCounter != null) {
      summary.setMapSlotSeconds(slotMillisMapCounter.getValue() / 1000);
    }

    Counter slotMillisReduceCounter = allCounters
      .findCounter(JobCounter.SLOTS_MILLIS_REDUCES);
    if (slotMillisReduceCounter != null) {
      summary.setReduceSlotSeconds(slotMillisReduceCounter.getValue() / 1000);
    }
  }

  /**
   * Closes the event writer for the given job.
   *
   * @throws IOException if the job is unknown, the writer was already closed
   *         (e.g. duplicate terminal events), or closing fails.
   */
  protected void closeEventWriter(JobId jobId) throws IOException {

    final MetaInfo mi = fileMap.get(jobId);
    if (mi == null) {
      throw new IOException("No MetaInfo found for JobId: [" + jobId + "]");
    }

    if (!mi.isWriterActive()) {
      throw new IOException(
          "Inactive Writer: Likely received multiple JobFinished / " +
          "JobUnsuccessful events for JobId: [" + jobId + "]");
    }

    // Close the Writer
    try {
      mi.closeWriter();
    } catch (IOException e) {
      LOG.error("Error closing writer for JobID: " + jobId);
      throw e;
    }
  }

  /**
   * Finalizes a job's history artifacts: writes the summary file, then moves
   * summary, conf and history files (written under "_tmp" names) into the
   * done directory and renames them to their final names.
   */
  protected void processDoneFiles(JobId jobId) throws IOException {

    final MetaInfo mi = fileMap.get(jobId);
    if (mi == null) {
      throw new IOException("No MetaInfo found for JobId: [" + jobId + "]");
    }

    if (mi.getHistoryFile() == null) {
      LOG.warn("No file for job-history with " + jobId + " found in cache!");
    }
    if (mi.getConfFile() == null) {
      LOG.warn("No file for jobconf with " + jobId + " found in cache!");
    }

    // Writing out the summary file.
    // TODO JH enhancement - reuse this file to store additional indexing info
    // like ACLs, etc. JHServer can use HDFS append to build an index file
    // with more info than is available via the filename.
    Path qualifiedSummaryDoneFile = null;
    FSDataOutputStream summaryFileOut = null;
    try {
      String doneSummaryFileName = getTempFileName(JobHistoryUtils
          .getIntermediateSummaryFileName(jobId));
      qualifiedSummaryDoneFile = doneDirFS.makeQualified(new Path(
          doneDirPrefixPath, doneSummaryFileName));
      summaryFileOut = doneDirFS.create(qualifiedSummaryDoneFile, true);
      summaryFileOut.writeUTF(mi.getJobSummary().getJobSummaryString());
      // NOTE(review): close() is not in a finally block — if writeUTF throws,
      // the stream leaks. Consider try-with-resources; TODO confirm.
      summaryFileOut.close();
      doneDirFS.setPermission(qualifiedSummaryDoneFile, new FsPermission(
          JobHistoryUtils.HISTORY_INTERMEDIATE_FILE_PERMISSIONS));
    } catch (IOException e) {
      LOG.info("Unable to write out JobSummaryInfo to ["
          + qualifiedSummaryDoneFile + "]", e);
      throw e;
    }

    try {

      // Move historyFile to Done Folder.
      Path qualifiedDoneFile = null;
      if (mi.getHistoryFile() != null) {
        Path historyFile = mi.getHistoryFile();
        Path qualifiedLogFile = stagingDirFS.makeQualified(historyFile);
        String doneJobHistoryFileName =
            getTempFileName(FileNameIndexUtils.getDoneFileName(mi
                .getJobIndexInfo()));
        qualifiedDoneFile =
            doneDirFS.makeQualified(new Path(doneDirPrefixPath,
                doneJobHistoryFileName));
        moveToDoneNow(qualifiedLogFile, qualifiedDoneFile);
      }

      // Move confFile to Done Folder
      Path qualifiedConfDoneFile = null;
      if (mi.getConfFile() != null) {
        Path confFile = mi.getConfFile();
        Path qualifiedConfFile = stagingDirFS.makeQualified(confFile);
        String doneConfFileName =
            getTempFileName(JobHistoryUtils
                .getIntermediateConfFileName(jobId));
        qualifiedConfDoneFile =
            doneDirFS.makeQualified(new Path(doneDirPrefixPath,
                doneConfFileName));
        moveToDoneNow(qualifiedConfFile, qualifiedConfDoneFile);
      }

      // Strip the "_tmp" suffixes to publish the files atomically.
      moveTmpToDone(qualifiedSummaryDoneFile);
      moveTmpToDone(qualifiedConfDoneFile);
      moveTmpToDone(qualifiedDoneFile);
    } catch (IOException e) {
      // NOTE(review): message is misleading — the failure here comes from the
      // move-to-done steps, not from closing a writer (and the exception is
      // not passed to the logger). TODO fix message / log the cause.
      LOG.error("Error closing writer for JobID: " + jobId);
      throw e;
    }
  }

  /**
   * Timer task that periodically flushes a job's buffered history events
   * while holding {@code lock}; a flush failure is stashed in {@code ioe}
   * and surfaced later via {@link #getException()}.
   */
  private class FlushTimerTask extends TimerTask {
    private MetaInfo metaInfo;
    // Last flush failure, if any; read back on the next resetFlushTimer().
    private IOException ioe = null;
    private volatile boolean shouldRun = true;

    FlushTimerTask(MetaInfo metaInfo) {
      this.metaInfo = metaInfo;
    }

    @Override
    public void run() {
      LOG.debug("In flush timer task");
      synchronized (lock) {
        try {
          if (!metaInfo.isTimerShutDown() && shouldRun)
            metaInfo.flush();
        } catch (IOException e) {
          ioe = e;
        }
      }
    }

    public IOException getException() {
      return ioe;
    }

    public void stop() {
      shouldRun = false;
      this.cancel();
    }
  }

  /**
   * Per-job bookkeeping: history/conf file paths, the event writer, the
   * index info used to build the done-file name, the running summary, and
   * the flush timer machinery.
   */
  protected class MetaInfo {
    private Path historyFile;
    private Path confFile;
    private EventWriter writer;
    JobIndexInfo jobIndexInfo;
    JobSummary jobSummary;
    Timer flushTimer;
    FlushTimerTask flushTimerTask;
    private boolean isTimerShutDown = false;
    private String forcedJobStateOnShutDown;

    MetaInfo(Path historyFile, Path conf, EventWriter writer, String user,
             String jobName, JobId jobId, String forcedJobStateOnShutDown,
             String queueName) {
      this.historyFile = historyFile;
      this.confFile = conf;
      this.writer = writer;
      this.jobIndexInfo =
          new JobIndexInfo(-1, -1, user, jobName, jobId, -1, -1, null,
                           queueName);
      this.jobSummary = new JobSummary();
      // Daemon timer so a live flush timer cannot keep the JVM alive.
      this.flushTimer = new Timer("FlushTimer", true);
      this.forcedJobStateOnShutDown = forcedJobStateOnShutDown;
    }

    Path getHistoryFile() {
      return historyFile;
    }

    Path getConfFile() {
      return confFile;
    }

    JobIndexInfo getJobIndexInfo() {
      return jobIndexInfo;
    }

    JobSummary getJobSummary() {
      return jobSummary;
    }

    // A null writer means the job's terminal event was already processed.
    boolean isWriterActive() {
      return writer != null;
    }

    boolean isTimerShutDown() {
      return isTimerShutDown;
    }

    String getForcedJobStateOnShutDown() {
      return forcedJobStateOnShutDown;
    }

    @Override
    public String toString() {
      return "Job MetaInfo for "+ jobSummary.getJobId()
             + " history file " + historyFile;
    }

    void closeWriter() throws IOException {
      LOG.debug("Closing Writer");
      synchronized (lock) {
        if (writer != null) {
          writer.close();
        }
        writer = null;
      }
    }

    void writeEvent(HistoryEvent event) throws IOException {
      LOG.debug("Writing event");
      synchronized (lock) {
        if (writer != null) {
          writer.write(event);
          processEventForFlush(event);
          maybeFlush(event);
        }
      }
    }

    // Counts completion-style events and (re)arms the flush timer so buffered
    // events are flushed within flushTimeout even under low traffic.
    void processEventForFlush(HistoryEvent historyEvent) throws IOException {
      if (EnumSet.of(EventType.MAP_ATTEMPT_FINISHED,
          EventType.MAP_ATTEMPT_FAILED, EventType.MAP_ATTEMPT_KILLED,
          EventType.REDUCE_ATTEMPT_FINISHED, EventType.REDUCE_ATTEMPT_FAILED,
          EventType.REDUCE_ATTEMPT_KILLED, EventType.TASK_FINISHED,
          EventType.TASK_FAILED, EventType.JOB_FINISHED, EventType.JOB_FAILED,
          EventType.JOB_KILLED).contains(historyEvent.getEventType())) {
        numUnflushedCompletionEvents++;
        if (!isTimerActive) {
          resetFlushTimer();
          if (!isTimerShutDown) {
            flushTimerTask = new FlushTimerTask(this);
            flushTimer.schedule(flushTimerTask, flushTimeout);
          }
        }
      }
    }

    // Stops any pending flush timer task and rethrows an IOException the
    // timer thread may have stashed during an earlier asynchronous flush.
    void resetFlushTimer() throws IOException {
      if (flushTimerTask != null) {
        IOException exception = flushTimerTask.getException();
        flushTimerTask.stop();
        if (exception != null) {
          throw exception;
        }
        flushTimerTask = null;
      }
      isTimerActive = false;
    }

    // Flush when the queue has drained (batching no longer pays off), when
    // too many completion events are buffered, or on a job-terminal event.
    void maybeFlush(HistoryEvent historyEvent) throws IOException {
      if ((eventQueue.size() < minQueueSizeForBatchingFlushes
          && numUnflushedCompletionEvents > 0)
          || numUnflushedCompletionEvents >= maxUnflushedCompletionEvents
          || isJobCompletionEvent(historyEvent)) {
        this.flush();
      }
    }

    void flush() throws IOException {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Flushing " + toString());
      }

      synchronized (lock) {
        if (numUnflushedCompletionEvents != 0) { // skipped timer cancel.
          writer.flush();
          numUnflushedCompletionEvents = 0;
          resetFlushTimer();
        }
      }
    }

    void shutDownTimer() throws IOException {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Shutting down timer "+ toString());
      }

      synchronized (lock) {
        isTimerShutDown = true;
        flushTimer.cancel();
        if (flushTimerTask != null && flushTimerTask.getException() != null) {
          throw flushTimerTask.getException();
        }
      }
    }
  }

  /** Renames a "_tmp"-suffixed done file to its final published name. */
  private void moveTmpToDone(Path tmpPath) throws IOException {
    if (tmpPath != null) {
      String tmpFileName = tmpPath.getName();
      String fileName = getFileNameFromTmpFN(tmpFileName);
      Path path = new Path(tmpPath.getParent(), fileName);
      doneDirFS.rename(tmpPath, path);
      LOG.info("Moved tmp to done: " + tmpPath + " to " + path);
    }
  }

  // TODO If the FS objects are the same, this should be a rename instead of a
  // copy.
  private void moveToDoneNow(Path fromPath, Path toPath) throws IOException {
    // check if path exists, in case of retries it may not exist
    if (stagingDirFS.exists(fromPath)) {
      LOG.info("Copying " + fromPath.toString() + " to " + toPath.toString());
      // TODO temporarily removing the existing dst
      if (doneDirFS.exists(toPath)) {
        doneDirFS.delete(toPath, true);
      }
      boolean copied = FileUtil.copy(stagingDirFS, fromPath, doneDirFS, toPath,
          false, getConfig());

      // NOTE(review): a failed copy is only logged at info and not raised —
      // presumably best-effort by design, TODO confirm.
      if (copied)
        LOG.info("Copied to done location: " + toPath);
      else
          LOG.info("copy failed");
      doneDirFS.setPermission(toPath, new FsPermission(
          JobHistoryUtils.HISTORY_INTERMEDIATE_FILE_PERMISSIONS));
    }
  }

  boolean pathExists(FileSystem fileSys, Path path) throws IOException {
    return fileSys.exists(path);
  }

  private String getTempFileName(String srcFile) {
    return srcFile + "_tmp";
  }

  private String getFileNameFromTmpFN(String tmpFileName) {
    //TODO. Some error checking here.
    // The magic 4 is "_tmp".length(), matching getTempFileName above.
    return tmpFileName.substring(0, tmpFileName.length()-4);
  }

  /** Toggles forced job completion (e.g. on AM shutdown); logged for audit. */
  public void setForcejobCompletion(boolean forceJobCompletion) {
    this.forceJobCompletion = forceJobCompletion;
    LOG.info("JobHistoryEventHandler notified that forceJobCompletion is "
      + forceJobCompletion);
  }

  /**
   * Maps an internal forced state (set at shutdown) to the external JobState
   * recorded in the unsuccessful-completion event; defaults to KILLED when
   * the forced state is absent or unrecognized.
   */
  private String createJobStateForJobUnsuccessfulCompletionEvent(
      String forcedJobStateOnShutDown) {
    if (forcedJobStateOnShutDown == null || forcedJobStateOnShutDown
        .isEmpty()) {
      return JobState.KILLED.toString();
    } else if (forcedJobStateOnShutDown.equals(
        JobStateInternal.ERROR.toString()) ||
        forcedJobStateOnShutDown.equals(JobStateInternal.FAILED.toString())) {
      return JobState.FAILED.toString();
    } else if (forcedJobStateOnShutDown.equals(JobStateInternal.SUCCEEDED
        .toString())) {
      return JobState.SUCCEEDED.toString();
    }
    return JobState.KILLED.toString();
  }
}
apache/paimon
36,454
paimon-flink/paimon-flink-common/src/test/java/org/apache/paimon/flink/CompositePkAndMultiPartitionedTableITCase.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.paimon.flink;

import org.apache.paimon.CoreOptions;
import org.apache.paimon.flink.util.AbstractTestBase;

import org.apache.flink.types.Row;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.apache.flink.table.planner.factories.TestValuesTableFactory.changelogRow;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.bEnv;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.buildQuery;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.buildSimpleQuery;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.checkFileStorePath;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.createTable;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.createTemporaryTable;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.init;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.insertIntoFromTable;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.insertOverwrite;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.insertOverwritePartition;
import static org.apache.paimon.flink.util.ReadWriteTableTestUtil.testBatchRead;

/** Paimon IT case when the table has composite primary keys and multiple partition fields. */
public class CompositePkAndMultiPartitionedTableITCase extends AbstractTestBase {

    // Table option disabling dynamic partition overwrite, i.e. INSERT OVERWRITE
    // replaces the whole table rather than only the partitions being written.
    private final Map<String, String> staticPartitionOverwrite =
            Collections.singletonMap(CoreOptions.DYNAMIC_PARTITION_OVERWRITE.key(), "false");

    @BeforeEach
    public void setUp() {
        // Point the shared test harness at a fresh warehouse directory per test.
        init(getTempDirPath());
    }

    // Batch write/read on a table partitioned by (dt, hh) with composite
    // primary key (from_currency, to_currency, dt, hh); exercises partition
    // overwrite, partition/field filters, projection, and their combinations.
    // Note: later rows with the same primary key win (e.g. Yen 0.0082 -> 0.0081).
    @Test
    public void testBatchWriteWithMultiPartitionedRecordsWithMultiPk() throws Exception {
        List<Row> initialRecords =
                Arrays.asList(
                        // to_currency is USD, dt = 2022-01-01, hh = 11
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "11"),
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01", "11"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d, "2022-01-01", "11"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0082d, "2022-01-01", "11"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.74d, "2022-01-01", "11"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-01", "11"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "11"),
                        // to_currency is USD, dt = 2022-01-01, hh = 12
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "12"),
                        changelogRow("+I", "Euro", "US Dollar", 1.12d, "2022-01-01", "12"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.129d, "2022-01-01", "12"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-01", "12"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.741d, "2022-01-01", "12"),
                        changelogRow("+I", "Yen", "US Dollar", 0.00812d, "2022-01-01", "12"),
                        // to_currency is Euro, dt = 2022-01-02, hh = 23
                        changelogRow("+I", "US Dollar", "Euro", 0.918d, "2022-01-02", "23"),
                        changelogRow("+I", "Singapore Dollar", "Euro", 0.67d, "2022-01-02", "23"));

        String temporaryTable =
                createTemporaryTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE",
                                "dt STRING",
                                "hh STRING"),
                        Arrays.asList("from_currency", "to_currency", "dt", "hh"),
                        Arrays.asList("dt", "hh"),
                        initialRecords,
                        "dt:2022-01-01,hh:11;dt:2022-01-01,hh:12;dt:2022-01-02,hh:23",
                        true,
                        "I");

        String table =
                createTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE",
                                "dt STRING",
                                "hh STRING"),
                        Arrays.asList("from_currency", "to_currency", "dt", "hh"),
                        Collections.emptyList(),
                        Arrays.asList("dt", "hh"));

        insertIntoFromTable(temporaryTable, table);

        checkFileStorePath(
                table,
                Arrays.asList("dt=2022-01-01,hh=11", "dt=2022-01-01,hh=12", "dt=2022-01-02,hh=23"));

        // test batch read
        testBatchRead(
                buildSimpleQuery(table),
                Arrays.asList(
                        // to_currency is USD, dt = 2022-01-01, hh = 11
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01", "11"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d, "2022-01-01", "11"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.74d, "2022-01-01", "11"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-01", "11"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "11"),
                        // to_currency is USD, dt = 2022-01-01, hh = 12
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "12"),
                        changelogRow("+I", "Euro", "US Dollar", 1.12d, "2022-01-01", "12"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.129d, "2022-01-01", "12"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.741d, "2022-01-01", "12"),
                        changelogRow("+I", "Yen", "US Dollar", 0.00812d, "2022-01-01", "12"),
                        // to_currency is Euro, dt = 2022-01-02, hh = 23
                        changelogRow("+I", "US Dollar", "Euro", 0.918d, "2022-01-02", "23"),
                        changelogRow("+I", "Singapore Dollar", "Euro", 0.67d, "2022-01-02", "23")));

        // overwrite static partition dt = 2022-01-02 and hh = 23
        insertOverwritePartition(
                table,
                "PARTITION (dt = '2022-01-02', hh = '23')",
                "('US Dollar', 'Thai Baht', 33.51)");

        // batch read to check partition refresh
        testBatchRead(
                buildQuery(table, "*", "WHERE dt = '2022-01-02' AND hh = '23'"),
                Collections.singletonList(
                        changelogRow("+I", "US Dollar", "Thai Baht", 33.51d, "2022-01-02", "23")));

        // test partition filter
        testBatchRead(
                buildQuery(table, "*", "WHERE dt = '2022-01-01'"),
                Arrays.asList(
                        // to_currency is USD, dt = 2022-01-01, hh = 11
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01", "11"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d, "2022-01-01", "11"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.74d, "2022-01-01", "11"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-01", "11"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "11"),
                        // to_currency is USD, dt = 2022-01-01, hh = 12
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "12"),
                        changelogRow("+I", "Euro", "US Dollar", 1.12d, "2022-01-01", "12"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.129d, "2022-01-01", "12"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.741d, "2022-01-01", "12"),
                        changelogRow("+I", "Yen", "US Dollar", 0.00812d, "2022-01-01", "12")));

        testBatchRead(
                buildQuery(table, "*", "WHERE dt = '2022-01-01' AND hh = '12'"),
                Arrays.asList(
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "12"),
                        changelogRow("+I", "Euro", "US Dollar", 1.12d, "2022-01-01", "12"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.129d, "2022-01-01", "12"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.741d, "2022-01-01", "12"),
                        changelogRow("+I", "Yen", "US Dollar", 0.00812d, "2022-01-01", "12")));

        // test field filter
        testBatchRead(
                buildQuery(table, "*", "WHERE to_currency = 'Euro'"), Collections.emptyList());

        testBatchRead(
                buildQuery(table, "*", "WHERE from_currency = 'HK Dollar'"),
                Arrays.asList(
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d, "2022-01-01", "11"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.129d, "2022-01-01", "12")));

        testBatchRead(
                buildQuery(table, "*", "WHERE rate_by_to_currency > 0.5"),
                Arrays.asList(
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01", "11"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.74d, "2022-01-01", "11"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "11"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01", "12"),
                        changelogRow("+I", "Euro", "US Dollar", 1.12d, "2022-01-01", "12"),
                        changelogRow(
                                "+I", "Singapore Dollar", "US Dollar", 0.741d, "2022-01-01", "12"),
                        changelogRow("+I", "US Dollar", "Thai Baht", 33.51d, "2022-01-02", "23")));

        // test partition and field filter
        testBatchRead(
                buildQuery(table, "*", "WHERE rate_by_to_currency > 1 AND hh = '12'"),
                Collections.singletonList(
                        changelogRow("+I", "Euro", "US Dollar", 1.12, "2022-01-01", "12")));

        // test projection
        testBatchRead(
                buildQuery(table, "from_currency, dt, hh", ""),
                Arrays.asList(
                        // dt = 2022-01-01, hh = 11
                        changelogRow("+I", "Euro", "2022-01-01", "11"),
                        changelogRow("+I", "HK Dollar", "2022-01-01", "11"),
                        changelogRow("+I", "Singapore Dollar", "2022-01-01", "11"),
                        changelogRow("+I", "Yen", "2022-01-01", "11"),
                        changelogRow("+I", "US Dollar", "2022-01-01", "11"),
                        // dt = 2022-01-01, hh = 12
                        changelogRow("+I", "US Dollar", "2022-01-01", "12"),
                        changelogRow("+I", "Euro", "2022-01-01", "12"),
                        changelogRow("+I", "HK Dollar", "2022-01-01", "12"),
                        changelogRow("+I", "Singapore Dollar", "2022-01-01", "12"),
                        changelogRow("+I", "Yen", "2022-01-01", "12"),
                        // dt = 2022-01-02, hh = 23
                        changelogRow("+I", "US Dollar", "2022-01-02", "23")));

        // test projection and filter
        testBatchRead(
                buildQuery(
                        table,
                        "from_currency, to_currency",
                        "WHERE dt = '2022-01-01' AND hh >= '12' OR rate_by_to_currency > 2"),
                Arrays.asList(
                        // dt = 2022-01-01, hh = 12
                        changelogRow("+I", "US Dollar", "US Dollar"),
                        changelogRow("+I", "Euro", "US Dollar"),
                        changelogRow("+I", "HK Dollar", "US Dollar"),
                        changelogRow("+I", "Singapore Dollar", "US Dollar"),
                        changelogRow("+I", "Yen", "US Dollar"),
                        // dt = 2022-01-02, hh = 23
                        changelogRow("+I", "US Dollar", "Thai Baht")));
    }

    // Single partition field (dt), composite primary key. Also covers static
    // (non-dynamic) partition overwrite semantics: with dynamic overwrite
    // disabled, a plain INSERT OVERWRITE replaces the entire table content.
    @Test
    public void testBatchWriteWithSinglePartitionedRecordsWithMultiPk() throws Exception {
        List<Row> initialRecords =
                Arrays.asList(
                        // to_currency is USD
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01"),
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d, "2022-01-01"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0082d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "US Dollar", 0.74d, "2022-01-01"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-02"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-02"),
                        // to_currency is Euro
                        changelogRow("+I", "US Dollar", "Euro", 0.9d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "Euro", 0.67d, "2022-01-01"),
                        // to_currency is Yen
                        changelogRow("+I", "Yen", "Yen", 1.0d, "2022-01-01"),
                        changelogRow("+I", "Chinese Yuan", "Yen", 19.25d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "Yen", 90.32d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "Yen", 122.46d, "2022-01-01"));

        String temporaryTable =
                createTemporaryTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE",
                                "dt STRING"),
                        Arrays.asList("from_currency", "to_currency", "dt"),
                        Collections.singletonList("dt"),
                        initialRecords,
                        "dt:2022-01-01;dt:2022-01-02",
                        true,
                        "I");

        String table =
                createTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE",
                                "dt STRING"),
                        Arrays.asList("from_currency", "to_currency", "dt"),
                        Collections.emptyList(),
                        Collections.singletonList("dt"),
                        staticPartitionOverwrite);

        insertIntoFromTable(temporaryTable, table);

        checkFileStorePath(table, Arrays.asList("dt=2022-01-01", "dt=2022-01-02"));

        // test batch read
        // (the duplicate "Singapore Dollar"/"Yen" key keeps only the last rate, 122.46)
        testBatchRead(
                buildSimpleQuery(table),
                Arrays.asList(
                        // to_currency is USD
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01"),
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01"),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d, "2022-01-01"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0082d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "US Dollar", 0.74d, "2022-01-01"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-02"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-02"),
                        // to_currency is Euro
                        changelogRow("+I", "US Dollar", "Euro", 0.9d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "Euro", 0.67d, "2022-01-01"),
                        // to_currency is Yen
                        changelogRow("+I", "Yen", "Yen", 1.0d, "2022-01-01"),
                        changelogRow("+I", "Chinese Yuan", "Yen", 19.25d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "Yen", 122.46d, "2022-01-01")));

        // overwrite dynamic partition
        insertOverwrite(table, "('US Dollar', 'Thai Baht', 33.51, '2022-01-01')");

        // batch read to check partition refresh
        testBatchRead(
                buildSimpleQuery(table),
                Collections.singletonList(
                        changelogRow("+I", "US Dollar", "Thai Baht", 33.51d, "2022-01-01")));

        // reset table
        table =
                createTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE",
                                "dt STRING"),
                        Arrays.asList("from_currency", "to_currency", "dt"),
                        Collections.emptyList(),
                        Collections.singletonList("dt"));

        insertIntoFromTable(temporaryTable, table);

        // test partition filter
        testBatchRead(
                buildQuery(table, "*", "WHERE dt = '2022-01-02'"),
                Arrays.asList(
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-02"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-02")));

        // test field filter
        testBatchRead(
                buildQuery(table, "*", "WHERE rate_by_to_currency < 0.1"),
                Arrays.asList(
                        changelogRow("+I", "Yen", "US Dollar", 0.0082d, "2022-01-01"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-02")));

        // test partition and field filter
        testBatchRead(
                buildQuery(table, "*", "WHERE rate_by_to_currency > 0.9 OR dt = '2022-01-02'"),
                Arrays.asList(
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-01"),
                        changelogRow("+I", "Euro", "US Dollar", 1.11d, "2022-01-01"),
                        changelogRow("+I", "Yen", "Yen", 1.0d, "2022-01-01"),
                        changelogRow("+I", "Chinese Yuan", "Yen", 19.25d, "2022-01-01"),
                        changelogRow("+I", "Singapore Dollar", "Yen", 122.46d, "2022-01-01"),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d, "2022-01-02"),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d, "2022-01-02")));

        // test projection
        testBatchRead(
                buildQuery(table, "rate_by_to_currency, dt", ""),
                Arrays.asList(
                        changelogRow("+I", 1.0d, "2022-01-01"), // US Dollar,US Dollar
                        changelogRow("+I", 1.11d, "2022-01-01"), // Euro,US Dollar
                        changelogRow("+I", 0.13d, "2022-01-01"), // HK Dollar,US Dollar
                        changelogRow("+I", 0.0082d, "2022-01-01"), // Yen,US Dollar
                        changelogRow("+I", 0.74d, "2022-01-01"), // Singapore Dollar,US Dollar
                        changelogRow("+I", 0.9d, "2022-01-01"), // US Dollar,Euro
                        changelogRow("+I", 0.67d, "2022-01-01"), // Singapore Dollar,Euro
                        changelogRow("+I", 1.0d, "2022-01-01"), // Yen,Yen
                        changelogRow("+I", 19.25d, "2022-01-01"), // Chinese Yuan,Yen
                        changelogRow("+I", 122.46d, "2022-01-01"), // Singapore Dollar,Yen
                        changelogRow("+I", 0.0081d, "2022-01-02"), // Yen,US Dollar
                        changelogRow("+I", 1.0d, "2022-01-02") // US Dollar,US Dollar
                        ));

        // test projection and filter
        testBatchRead(
                buildQuery(
                        table,
                        "from_currency, to_currency",
                        "WHERE dt = '2022-01-02' OR rate_by_to_currency > 100"),
                Arrays.asList(
                        changelogRow("+I", "Yen", "US Dollar"),
                        changelogRow("+I", "US Dollar", "US Dollar"),
                        changelogRow("+I", "Singapore Dollar", "Yen")));
    }

    // No partition fields; composite primary key (from_currency, to_currency).
    // Verifies PK dedup (last write wins), field filters and projection.
    @Test
    public void testBatchWriteWithNonPartitionedRecordsWithMultiPk() throws Exception {
        List<Row> initialRecords =
                Arrays.asList(
                        // to_currency is USD
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d),
                        changelogRow("+I", "Euro", "US Dollar", 1.11d),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d),
                        changelogRow("+I", "Yen", "US Dollar", 0.0082d),
                        changelogRow("+I", "Singapore Dollar", "US Dollar", 0.74d),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d),
                        // to_currency is Euro
                        changelogRow("+I", "US Dollar", "Euro", 0.9d),
                        changelogRow("+I", "Singapore Dollar", "Euro", 0.67d),
                        // to_currency is Yen
                        changelogRow("+I", "Yen", "Yen", 1.0d),
                        changelogRow("+I", "Chinese Yuan", "Yen", 19.25d),
                        changelogRow("+I", "Singapore Dollar", "Yen", 90.32d),
                        changelogRow("+I", "Singapore Dollar", "Yen", 122.46d));

        String temporaryTable =
                createTemporaryTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE"),
                        Arrays.asList("from_currency", "to_currency"),
                        Collections.emptyList(),
                        initialRecords,
                        null,
                        true,
                        "I");

        String table =
                createTable(
                        Arrays.asList(
                                "from_currency STRING",
                                "to_currency STRING",
                                "rate_by_to_currency DOUBLE"),
                        Arrays.asList("from_currency", "to_currency"),
                        Collections.emptyList(),
                        Collections.emptyList());

        insertIntoFromTable(temporaryTable, table);

        checkFileStorePath(table, Collections.emptyList());

        // test batch read
        testBatchRead(
                buildSimpleQuery(table),
                Arrays.asList(
                        // to_currency is USD
                        changelogRow("+I", "Euro", "US Dollar", 1.11d),
                        changelogRow("+I", "HK Dollar", "US Dollar", 0.13d),
                        changelogRow("+I", "Singapore Dollar", "US Dollar", 0.74d),
                        changelogRow("+I", "Yen", "US Dollar", 0.0081d),
                        changelogRow("+I", "US Dollar", "US Dollar", 1.0d),
                        // to_currency is Euro
                        changelogRow("+I", "US Dollar", "Euro", 0.9d),
                        changelogRow("+I", "Singapore Dollar", "Euro", 0.67d),
                        // to_currency is Yen
                        changelogRow("+I", "Yen", "Yen", 1.0d),
                        changelogRow("+I", "Chinese Yuan", "Yen", 19.25d),
                        changelogRow("+I", "Singapore Dollar", "Yen", 122.46d)));

        // test field filter
        testBatchRead(
                buildQuery(table, "*", "WHERE rate_by_to_currency < 0.1"),
                Collections.singletonList(changelogRow("+I", "Yen", "US Dollar", 0.0081d)));

        // test projection
        testBatchRead(
                buildQuery(table, "rate_by_to_currency", ""),
                Arrays.asList(
                        changelogRow("+I", 1.11d), // Euro, US Dollar
                        changelogRow("+I", 0.13d), // HK Dollar, US Dollar
                        changelogRow("+I", 0.74d), // Singapore Dollar, US Dollar
                        changelogRow("+I", 0.0081d), // Yen, US Dollar
                        changelogRow("+I", 1.0d), // US Dollar, US Dollar
                        changelogRow("+I", 0.9d), // US Dollar, Euro
                        changelogRow("+I", 0.67d), // Singapore Dollar, Euro
                        changelogRow("+I", 1.0d), // Yen, Yen
                        changelogRow("+I", 19.25d), // Chinese Yuan, Yen
                        changelogRow("+I", 122.46d) // Singapore Dollar, Yen
                        ));

        // test projection and filter
        testBatchRead(
                buildQuery(table, "from_currency, to_currency", "WHERE rate_by_to_currency > 100"),
                Collections.singletonList(changelogRow("+I", "Singapore Dollar", "Yen")));
    }

    // Table partitioned by (dt, hh) with a single-field primary key prefix
    // (currency, dt, hh).
    // NOTE(review): this method continues past the end of this excerpt; the
    // remainder of its body (and any further methods) is not visible here.
    @Test
    public void testBatchWriteMultiPartitionedRecordsWithOnePk() throws Exception {
        List<Row> initialRecords =
                Arrays.asList(
                        // dt = 2022-01-01, hh = 00
                        changelogRow("+I", "US Dollar", 102L, "2022-01-01", "00"),
                        changelogRow("+I", "Euro", 114L, "2022-01-01", "00"),
                        changelogRow("+I", "Yen", 1L, "2022-01-01", "00"),
                        changelogRow("+I", "Euro", 114L, "2022-01-01", "00"),
                        changelogRow("+I", "US Dollar", 114L, "2022-01-01", "00"),
                        // dt = 2022-01-01, hh = 20
                        changelogRow("+I", "US Dollar", 102L, "2022-01-01", "20"),
                        changelogRow("+I", "Euro", 114L, "2022-01-01", "20"),
                        changelogRow("+I", "Yen", 1L, "2022-01-01", "20"),
                        changelogRow("+I", "Euro", 114L, "2022-01-01", "20"),
                        changelogRow("+I", "US Dollar", 114L, "2022-01-01", "20"),
                        // dt = 2022-01-02, hh = 12
                        changelogRow("+I", "Euro", 119L, "2022-01-02", "12"));

        String temporaryTable =
                createTemporaryTable(
                        Arrays.asList("currency STRING", "rate BIGINT", "dt STRING", "hh STRING"),
                        Arrays.asList("currency", "dt", "hh"),
                        Arrays.asList("dt", "hh"),
                        initialRecords,
                        "dt:2022-01-01,hh:00;dt:2022-01-01,hh:20;dt:2022-01-02,hh:12",
                        true,
                        "I");

        String table =
                createTable(
                        Arrays.asList("currency STRING", "rate BIGINT", "dt STRING", "hh STRING"),
                        Arrays.asList("currency", "dt", "hh"),
                        Collections.emptyList(),
                        Arrays.asList("dt", "hh"));

        insertIntoFromTable(temporaryTable, table);

        checkFileStorePath(
                table,
                Arrays.asList(
                        "dt=2022-01-01,hh=00", "dt=2022-01-01,hh=20", "dt=2022-01-02,hh=12"));

        //
test batch read testBatchRead( buildSimpleQuery(table), Arrays.asList( // dt = 2022-01-01, hh = 00 changelogRow("+I", "Yen", 1L, "2022-01-01", "00"), changelogRow("+I", "Euro", 114L, "2022-01-01", "00"), changelogRow("+I", "US Dollar", 114L, "2022-01-01", "00"), // dt = 2022-01-01, hh = 20 changelogRow("+I", "Yen", 1L, "2022-01-01", "20"), changelogRow("+I", "Euro", 114L, "2022-01-01", "20"), changelogRow("+I", "US Dollar", 114L, "2022-01-01", "20"), // dt = 2022-01-02, hh = 12 changelogRow("+I", "Euro", 119L, "2022-01-02", "12"))); // test partition filter testBatchRead( buildQuery(table, "*", "WHERE hh >= '10'"), Arrays.asList( changelogRow("+I", "Yen", 1L, "2022-01-01", "20"), changelogRow("+I", "Euro", 114L, "2022-01-01", "20"), changelogRow("+I", "US Dollar", 114L, "2022-01-01", "20"), changelogRow("+I", "Euro", 119L, "2022-01-02", "12"))); // test field filter testBatchRead( buildQuery(table, "*", "WHERE rate >= 119"), Collections.singletonList(changelogRow("+I", "Euro", 119L, "2022-01-02", "12"))); // test projection testBatchRead( buildQuery(table, "hh", ""), Arrays.asList( changelogRow("+I", "00"), // Yen, 1L, 2022-01-01 changelogRow("+I", "00"), // Euro, 114L, 2022-01-01 changelogRow("+I", "00"), // US Dollar, 114L, 2022-01-01 changelogRow("+I", "20"), // Yen, 1L, 2022-01-01 changelogRow("+I", "20"), // Euro, 114L, 2022-01-01 changelogRow("+I", "20"), // US Dollar, 114L, 2022-01-01 changelogRow("+I", "12") // Euro, 119L, "2022-01-02 )); // test projection and filter testBatchRead( buildQuery(table, "rate", "WHERE rate > 100 AND hh >= '20'"), Collections.nCopies(2, changelogRow("+I", 114L))); } @Test public void testBatchWriteMultiPartitionedRecordsWithoutPk() throws Exception { List<Row> initialRecords = Arrays.asList( // dt = 2022-01-01, hh = 00 changelogRow("+I", "US Dollar", 102L, "2022-01-01", "00"), changelogRow("+I", "Euro", 114L, "2022-01-01", "00"), changelogRow("+I", "Yen", 1L, "2022-01-01", "00"), changelogRow("+I", "Euro", 114L, "2022-01-01", 
"00"), changelogRow("+I", "US Dollar", 114L, "2022-01-01", "00"), // dt = 2022-01-01, hh = 20 changelogRow("+I", "US Dollar", 102L, "2022-01-01", "20"), changelogRow("+I", "Euro", 114L, "2022-01-01", "20"), changelogRow("+I", "Yen", 1L, "2022-01-01", "20"), changelogRow("+I", "Euro", 114L, "2022-01-01", "20"), changelogRow("+I", "US Dollar", 114L, "2022-01-01", "20"), // dt = 2022-01-02, hh = 12 changelogRow("+I", "Euro", 119L, "2022-01-02", "12")); String temporaryTable = createTemporaryTable( Arrays.asList("currency STRING", "rate BIGINT", "dt STRING", "hh STRING"), Collections.emptyList(), Arrays.asList("dt", "hh"), initialRecords, "dt:2022-01-01,hh:00;dt:2022-01-01,hh:20;dt:2022-01-02,hh:12", true, "I"); String table = createTable( Arrays.asList("currency STRING", "rate BIGINT", "dt STRING", "hh STRING"), Collections.emptyList(), Collections.singletonList("currency"), Arrays.asList("dt", "hh"), staticPartitionOverwrite); insertIntoFromTable(temporaryTable, table); checkFileStorePath( table, Arrays.asList("dt=2022-01-01,hh=00", "dt=2022-01-01,hh=20", "dt=2022-01-02,hh=12")); // test batch read testBatchRead(buildSimpleQuery(table), initialRecords); // dynamic overwrite the whole table with null partitions bEnv.executeSql( String.format( "INSERT OVERWRITE `%s` SELECT 'Yen', 1, CAST(NULL AS STRING), CAST(NULL AS STRING) FROM `%s`", table, table)) .await(); // check new partition path checkFileStorePath(table, Collections.singletonList("dt=null,hh=null")); // batch read to check data refresh testBatchRead( buildSimpleQuery(table), Collections.nCopies(11, changelogRow("+I", "Yen", 1L, null, null))); // reset table table = createTable( Arrays.asList("currency STRING", "rate BIGINT", "dt STRING", "hh STRING"), Collections.emptyList(), Collections.singletonList("currency"), Arrays.asList("dt", "hh")); insertIntoFromTable(temporaryTable, table); // test partition filter testBatchRead( buildQuery(table, "*", "WHERE hh >= '10'"), Arrays.asList( changelogRow("+I", "US 
Dollar", 102L, "2022-01-01", "20"), changelogRow("+I", "Euro", 114L, "2022-01-01", "20"), changelogRow("+I", "Yen", 1L, "2022-01-01", "20"), changelogRow("+I", "Euro", 114L, "2022-01-01", "20"), changelogRow("+I", "US Dollar", 114L, "2022-01-01", "20"), changelogRow("+I", "Euro", 119L, "2022-01-02", "12"))); // test field filter testBatchRead( buildQuery(table, "*", "WHERE rate >= 119"), Collections.singletonList(changelogRow("+I", "Euro", 119L, "2022-01-02", "12"))); // test projection testBatchRead( buildQuery(table, "currency", ""), Arrays.asList( // dt = 2022-01-01, hh = 00 changelogRow("+I", "US Dollar"), changelogRow("+I", "Euro"), changelogRow("+I", "Yen"), changelogRow("+I", "Euro"), changelogRow("+I", "US Dollar"), // dt = 2022-01-01, hh = 20 changelogRow("+I", "US Dollar"), changelogRow("+I", "Euro"), changelogRow("+I", "Yen"), changelogRow("+I", "Euro"), changelogRow("+I", "US Dollar"), // dt = 2022-01-02, hh = 12 changelogRow("+I", "Euro"))); // test projection and filter testBatchRead( buildQuery(table, "rate", "WHERE rate > 110 AND hh >= '20'"), Collections.nCopies(3, changelogRow("+I", 114L))); } }
googleapis/google-cloud-java
37,883
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListArtifactsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/metadata_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Response message for * [MetadataService.ListArtifacts][google.cloud.aiplatform.v1beta1.MetadataService.ListArtifacts]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListArtifactsResponse} */ public final class ListArtifactsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListArtifactsResponse) ListArtifactsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListArtifactsResponse.newBuilder() to construct. 
private ListArtifactsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListArtifactsResponse() { artifacts_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListArtifactsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListArtifactsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListArtifactsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.class, com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.Builder.class); } public static final int ARTIFACTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1beta1.Artifact> artifacts_; /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1beta1.Artifact> getArtifactsList() { return artifacts_; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder> getArtifactsOrBuilderList() { return artifacts_; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ @java.lang.Override public int getArtifactsCount() { return artifacts_.size(); } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Artifact getArtifacts(int index) { return artifacts_.get(index); } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder getArtifactsOrBuilder(int index) { return artifacts_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < artifacts_.size(); i++) { output.writeMessage(1, artifacts_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < artifacts_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, artifacts_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse other = (com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse) obj; if (!getArtifactsList().equals(other.getArtifactsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getArtifactsCount() > 0) { hash = (37 * hash) + ARTIFACTS_FIELD_NUMBER; hash = (53 * hash) + getArtifactsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [MetadataService.ListArtifacts][google.cloud.aiplatform.v1beta1.MetadataService.ListArtifacts]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListArtifactsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListArtifactsResponse) com.google.cloud.aiplatform.v1beta1.ListArtifactsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListArtifactsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListArtifactsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.class, com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (artifactsBuilder_ == null) { artifacts_ = java.util.Collections.emptyList(); } else { artifacts_ = null; artifactsBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1beta1_ListArtifactsResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse build() { com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse buildPartial() { com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse result = new com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse result) { if (artifactsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { artifacts_ = java.util.Collections.unmodifiableList(artifacts_); bitField0_ = (bitField0_ & ~0x00000001); } result.artifacts_ = artifacts_; } else { result.artifacts_ = artifactsBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse other) { if (other == com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse.getDefaultInstance()) return this; if (artifactsBuilder_ == null) { if (!other.artifacts_.isEmpty()) { if (artifacts_.isEmpty()) { artifacts_ = other.artifacts_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureArtifactsIsMutable(); artifacts_.addAll(other.artifacts_); } onChanged(); } } else { if (!other.artifacts_.isEmpty()) { if (artifactsBuilder_.isEmpty()) { artifactsBuilder_.dispose(); artifactsBuilder_ = null; artifacts_ = other.artifacts_; bitField0_ = (bitField0_ & ~0x00000001); artifactsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getArtifactsFieldBuilder() : null; } else { artifactsBuilder_.addAllMessages(other.artifacts_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.Artifact m = input.readMessage( com.google.cloud.aiplatform.v1beta1.Artifact.parser(), extensionRegistry); if (artifactsBuilder_ == null) { ensureArtifactsIsMutable(); artifacts_.add(m); } else { artifactsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1beta1.Artifact> artifacts_ = java.util.Collections.emptyList(); private void ensureArtifactsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { artifacts_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.Artifact>(artifacts_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Artifact, com.google.cloud.aiplatform.v1beta1.Artifact.Builder, 
com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder> artifactsBuilder_; /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Artifact> getArtifactsList() { if (artifactsBuilder_ == null) { return java.util.Collections.unmodifiableList(artifacts_); } else { return artifactsBuilder_.getMessageList(); } } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public int getArtifactsCount() { if (artifactsBuilder_ == null) { return artifacts_.size(); } else { return artifactsBuilder_.getCount(); } } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Artifact getArtifacts(int index) { if (artifactsBuilder_ == null) { return artifacts_.get(index); } else { return artifactsBuilder_.getMessage(index); } } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder setArtifacts(int index, com.google.cloud.aiplatform.v1beta1.Artifact value) { if (artifactsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureArtifactsIsMutable(); artifacts_.set(index, value); onChanged(); } else { artifactsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder setArtifacts( int index, com.google.cloud.aiplatform.v1beta1.Artifact.Builder builderForValue) { if (artifactsBuilder_ == null) { ensureArtifactsIsMutable(); artifacts_.set(index, builderForValue.build()); onChanged(); } else { artifactsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder addArtifacts(com.google.cloud.aiplatform.v1beta1.Artifact value) { if (artifactsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureArtifactsIsMutable(); artifacts_.add(value); onChanged(); } else { artifactsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder addArtifacts(int index, com.google.cloud.aiplatform.v1beta1.Artifact value) { if (artifactsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureArtifactsIsMutable(); artifacts_.add(index, value); onChanged(); } else { artifactsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder addArtifacts( com.google.cloud.aiplatform.v1beta1.Artifact.Builder builderForValue) { if (artifactsBuilder_ == null) { ensureArtifactsIsMutable(); artifacts_.add(builderForValue.build()); onChanged(); } else { artifactsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder addArtifacts( int index, com.google.cloud.aiplatform.v1beta1.Artifact.Builder builderForValue) { if (artifactsBuilder_ == null) { ensureArtifactsIsMutable(); artifacts_.add(index, builderForValue.build()); onChanged(); } else { artifactsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder addAllArtifacts( java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.Artifact> values) { if (artifactsBuilder_ == null) { ensureArtifactsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, artifacts_); onChanged(); } else { artifactsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder clearArtifacts() { if (artifactsBuilder_ == null) { artifacts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { artifactsBuilder_.clear(); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public Builder removeArtifacts(int index) { if (artifactsBuilder_ == null) { ensureArtifactsIsMutable(); artifacts_.remove(index); onChanged(); } else { artifactsBuilder_.remove(index); } return this; } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Artifact.Builder getArtifactsBuilder(int index) { return getArtifactsFieldBuilder().getBuilder(index); } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder getArtifactsOrBuilder(int index) { if (artifactsBuilder_ == null) { return artifacts_.get(index); } else { return artifactsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder> getArtifactsOrBuilderList() { if (artifactsBuilder_ != null) { return artifactsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(artifacts_); } } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Artifact.Builder addArtifactsBuilder() { return getArtifactsFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1beta1.Artifact.getDefaultInstance()); } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. * </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.Artifact.Builder addArtifactsBuilder(int index) { return getArtifactsFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1beta1.Artifact.getDefaultInstance()); } /** * * * <pre> * The Artifacts retrieved from the MetadataStore. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1beta1.Artifact artifacts = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1beta1.Artifact.Builder> getArtifactsBuilderList() { return getArtifactsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Artifact, com.google.cloud.aiplatform.v1beta1.Artifact.Builder, com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder> getArtifactsFieldBuilder() { if (artifactsBuilder_ == null) { artifactsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Artifact, com.google.cloud.aiplatform.v1beta1.Artifact.Builder, com.google.cloud.aiplatform.v1beta1.ArtifactOrBuilder>( artifacts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); artifacts_ = null; } return artifactsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as * [ListArtifactsRequest.page_token][google.cloud.aiplatform.v1beta1.ListArtifactsRequest.page_token] * to retrieve the next page. * If this field is not populated, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListArtifactsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListArtifactsResponse) private static final com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse(); } public static com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListArtifactsResponse> PARSER = new com.google.protobuf.AbstractParser<ListArtifactsResponse>() { @java.lang.Override public ListArtifactsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<ListArtifactsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListArtifactsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ListArtifactsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
38,231
langtools/test/tools/javac/TryWithResources/TwrTests.java
/*
 * Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * @test
 * @bug 6911256 6964740
 * @summary Tests of generated TWR code.
 */

import java.util.List;
import java.util.ArrayList;

/**
 * Self-checking test of the code javac generates for try-with-resources (TWR)
 * blocks: resources must be closed in reverse creation order, and exceptions
 * thrown while closing must be recorded as suppressed exceptions on the
 * primary exception. Every check throws AssertionError on failure.
 */
public class TwrTests {
    public static void main(String[] args) {
        testCreateFailure1();
        testCreateFailure2();
        testCreateFailure2Nested();
        testCreateFailure3();
        testCreateFailure3Nested();
        testCreateFailure4();
        testCreateFailure4Nested();
        testCreateFailure5();
        testCreateFailure5Nested();

        testCreateSuccess1();
        testCreateSuccess2();
        testCreateSuccess2Nested();
        testCreateSuccess3();
        testCreateSuccess3Nested();
        testCreateSuccess4();
        testCreateSuccess4Nested();
        testCreateSuccess5();
        testCreateSuccess5Nested();
    }

    /*
     * The following tests simulate a creation failure of every possible
     * resource in a TWR block, and check to make sure that the failure
     * prevents creation of subsequent resources, and that all created
     * resources are properly closed, even if one or more of the close
     * attempts fails.
     */

    public static void testCreateFailure1() {
        int creationFailuresDetected = 0;
        List<Integer> closedList = new ArrayList<Integer>(0);
        try (Resource r0 = createResource(0, 0, 0, closedList)) {
            throw new AssertionError("Resource creation succeeded");
        } catch (Resource.CreateFailException e) {
            creationFailuresDetected++;
            if (e.resourceId() != 0) {
                throw new AssertionError("Wrong resource creation "
                                         + e.resourceId() + " failed");
            }
        } catch (Resource.CloseFailException e) {
            throw new AssertionError("Unexpected CloseFailException: " + e.resourceId());
        }
        checkForSingleCreationFailure(creationFailuresDetected);
        checkClosedList(closedList, 0);
    }

    public static void testCreateFailure2() {
        for (int createFailureId = 0; createFailureId < 2; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList);
                     Resource r1 = createResource(1, createFailureId, bitMap, closedList)) {
                    throw new AssertionError("Entire resource creation succeeded");
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed");
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure2Nested() {
        for (int createFailureId = 0; createFailureId < 2; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, createFailureId, bitMap, closedList)) {
                        throw new AssertionError("Entire resource creation succeeded");
                    }
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed");
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure3() {
        for (int createFailureId = 0; createFailureId < 3; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList);
                     Resource r1 = createResource(1, createFailureId, bitMap, closedList);
                     Resource r2 = createResource(2, createFailureId, bitMap, closedList)) {
                    throw new AssertionError("Entire resource creation succeeded");
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed:" + e);
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure3Nested() {
        for (int createFailureId = 0; createFailureId < 3; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, createFailureId, bitMap, closedList)) {
                        try (Resource r2 = createResource(2, createFailureId, bitMap, closedList)) {
                            throw new AssertionError("Entire resource creation succeeded");
                        }
                    }
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed:" + e);
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure4() {
        for (int createFailureId = 0; createFailureId < 4; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList);
                     Resource r1 = createResource(1, createFailureId, bitMap, closedList);
                     Resource r2 = createResource(2, createFailureId, bitMap, closedList);
                     Resource r3 = createResource(3, createFailureId, bitMap, closedList)) {
                    throw new AssertionError("Entire resource creation succeeded");
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed:" + e);
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure4Nested() {
        for (int createFailureId = 0; createFailureId < 4; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, createFailureId, bitMap, closedList)) {
                        try (Resource r2 = createResource(2, createFailureId, bitMap, closedList)) {
                            try (Resource r3 = createResource(3, createFailureId, bitMap, closedList)) {
                                throw new AssertionError("Entire resource creation succeeded");
                            }
                        }
                    }
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed:" + e);
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure5() {
        for (int createFailureId = 0; createFailureId < 5; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList);
                     Resource r1 = createResource(1, createFailureId, bitMap, closedList);
                     Resource r2 = createResource(2, createFailureId, bitMap, closedList);
                     Resource r3 = createResource(3, createFailureId, bitMap, closedList);
                     Resource r4 = createResource(4, createFailureId, bitMap, closedList)) {
                    throw new AssertionError("Entire resource creation succeeded");
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed:" + e);
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    public static void testCreateFailure5Nested() {
        for (int createFailureId = 0; createFailureId < 5; createFailureId++) {
            for (int bitMap = 0, n = 1 << createFailureId; bitMap < n; bitMap++) {
                int creationFailuresDetected = 0;
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, createFailureId, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, createFailureId, bitMap, closedList)) {
                        try (Resource r2 = createResource(2, createFailureId, bitMap, closedList)) {
                            try (Resource r3 = createResource(3, createFailureId, bitMap, closedList)) {
                                try (Resource r4 = createResource(4, createFailureId, bitMap, closedList)) {
                                    throw new AssertionError("Entire resource creation succeeded");
                                }
                            }
                        }
                    }
                } catch (Resource.CreateFailException e) {
                    creationFailuresDetected++;
                    checkCreateFailureId(e.resourceId(), createFailureId);
                    checkSuppressedExceptions(e.getSuppressed(), bitMap);
                } catch (Resource.CloseFailException e) {
                    throw new AssertionError("Secondary exception suppression failed:" + e);
                }
                checkForSingleCreationFailure(creationFailuresDetected);
                checkClosedList(closedList, createFailureId);
            }
        }
    }

    /**
     * Create a resource with the specified ID. The ID must be less than createFailureId.
     * A subsequent attempt to close the resource will fail iff the corresponding bit
     * is set in closeFailureBitMap. When an attempt is made to close this resource,
     * its ID will be added to closedList, regardless of whether the attempt succeeds.
     *
     * @param id the ID of this resource
     * @param createFailureId the ID of the resource whose creation will fail
     * @param closeFailureBitMap a bit vector describing which resources should throw an
     *        exception when close is attempted
     * @param closedList a list on which to record resource close attempts
     * @throws AssertionError if no attempt should be made to create this resource
     */
    private static Resource createResource(int id,
                                           int createFailureId,
                                           int closeFailureBitMap,
                                           List<Integer> closedList)
            throws Resource.CreateFailException {
        if (id > createFailureId)
            throw new AssertionError("Resource " + id + " shouldn't be created");
        boolean createSucceeds = id != createFailureId;
        boolean closeSucceeds = (closeFailureBitMap & (1 << id)) == 0;
        return new Resource(id, createSucceeds, closeSucceeds, closedList);
    }

    /**
     * Check that an observed creation failure has the expected resource ID.
     *
     * @param foundId the ID of the resource whose creation failed
     * @param expectedId the ID of the resource whose creation should have failed
     */
    private static void checkCreateFailureId(int foundId, int expectedId) {
        if (foundId != expectedId)
            throw new AssertionError("Wrong resource creation failed. Found ID "
                                     + foundId + " expected " + expectedId);
    }

    /**
     * Check for proper suppressed exceptions in proper order. Suppressed
     * CloseFailExceptions must appear in close order, i.e. with strictly
     * decreasing resource IDs, since resources are closed in reverse
     * creation order.
     *
     * @param suppressedExceptions the suppressed exceptions array returned by
     *        getSuppressed()
     * @param bitMap a bitmap indicating which suppressed exceptions are expected.
     *        Bit i is set iff resource i should throw a CloseFailException.
     */
    private static void checkSuppressedExceptions(Throwable[] suppressedExceptions, int bitMap) {
        if (suppressedExceptions.length != Integer.bitCount(bitMap))
            throw new AssertionError("Expected " + Integer.bitCount(bitMap)
                                     + " suppressed exceptions, got "
                                     + suppressedExceptions.length);

        int prevCloseFailExceptionId = Integer.MAX_VALUE;
        for (Throwable t : suppressedExceptions) {
            int id = ((Resource.CloseFailException) t).resourceId();
            if ((1 << id & bitMap) == 0)
                throw new AssertionError("Unexpected suppressed CloseFailException: " + id);
            if (id > prevCloseFailExceptionId)
                throw new AssertionError("Suppressed CloseFailException" + id
                                         + " followed " + prevCloseFailExceptionId);
            // BUGFIX: track the previous ID; without this assignment the
            // ordering check above could never fire (prev stayed MAX_VALUE).
            prevCloseFailExceptionId = id;
        }
    }

    /**
     * Check that exactly one resource creation failed.
     *
     * @param numCreationFailuresDetected the number of creation failures detected
     */
    private static void checkForSingleCreationFailure(int numCreationFailuresDetected) {
        if (numCreationFailuresDetected != 1)
            throw new AssertionError("Wrong number of creation failures: "
                                     + numCreationFailuresDetected);
    }

    /**
     * Check that a close was attempted on every resource that was successfully opened,
     * and that the close attempts occurred in the proper order.
     *
     * @param closedList the resource IDs of the close attempts, in the order they occurred
     * @param createFailureId the ID of the resource whose creation failed. Close attempts
     *        should occur for all previous resources, in reverse order.
     */
    private static void checkClosedList(List<Integer> closedList, int createFailureId) {
        List<Integer> expectedList = new ArrayList<Integer>(createFailureId);
        for (int i = createFailureId - 1; i >= 0; i--)
            expectedList.add(i);
        if (!closedList.equals(expectedList))
            throw new AssertionError("Closing sequence " + closedList + " != " + expectedList);
    }

    /*
     * The following tests simulate the creation of several resources, followed
     * by success or failure of forward processing. They test that all resources
     * are properly closed, even if one or more of the close attempts fails.
     */

    public static void testCreateSuccess1() {
        for (int bitMap = 0, n = 1 << 1; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList)) {
                    if (failure != 0)
                        throw new MyKindOfException();
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 1);
            }
        }
    }

    public static void testCreateSuccess2() {
        for (int bitMap = 0, n = 1 << 2; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList);
                     Resource r1 = createResource(1, bitMap, closedList)) {
                    if (failure != 0)
                        throw new MyKindOfException();
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 2);
            }
        }
    }

    public static void testCreateSuccess2Nested() {
        for (int bitMap = 0, n = 1 << 2; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, bitMap, closedList)) {
                        if (failure != 0)
                            throw new MyKindOfException();
                    }
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 2);
            }
        }
    }

    public static void testCreateSuccess3() {
        for (int bitMap = 0, n = 1 << 3; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList);
                     Resource r1 = createResource(1, bitMap, closedList);
                     Resource r2 = createResource(2, bitMap, closedList)) {
                    if (failure != 0)
                        throw new MyKindOfException();
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 3);
            }
        }
    }

    public static void testCreateSuccess3Nested() {
        for (int bitMap = 0, n = 1 << 3; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, bitMap, closedList)) {
                        try (Resource r2 = createResource(2, bitMap, closedList)) {
                            if (failure != 0)
                                throw new MyKindOfException();
                        }
                    }
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 3);
            }
        }
    }

    public static void testCreateSuccess4() {
        for (int bitMap = 0, n = 1 << 4; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList);
                     Resource r1 = createResource(1, bitMap, closedList);
                     Resource r2 = createResource(2, bitMap, closedList);
                     Resource r3 = createResource(3, bitMap, closedList)) {
                    if (failure != 0)
                        throw new MyKindOfException();
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 4);
            }
        }
    }

    public static void testCreateSuccess4Nested() {
        for (int bitMap = 0, n = 1 << 4; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, bitMap, closedList)) {
                        try (Resource r2 = createResource(2, bitMap, closedList)) {
                            try (Resource r3 = createResource(3, bitMap, closedList)) {
                                if (failure != 0)
                                    throw new MyKindOfException();
                            }
                        }
                    }
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError(
                        "Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 4);
            }
        }
    }

    public static void testCreateSuccess5() {
        for (int bitMap = 0, n = 1 << 5; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList);
                     Resource r1 = createResource(1, bitMap, closedList);
                     Resource r2 = createResource(2, bitMap, closedList);
                     Resource r3 = createResource(3, bitMap, closedList);
                     Resource r4 = createResource(4, bitMap, closedList)) {
                    if (failure != 0)
                        throw new MyKindOfException();
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError("Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 5);
            }
        }
    }

    public static void testCreateSuccess5Nested() {
        for (int bitMap = 0, n = 1 << 5; bitMap < n; bitMap++) {
            for (int failure = 0; failure < 2; failure++) {
                List<Integer> closedList = new ArrayList<Integer>();
                try (Resource r0 = createResource(0, bitMap, closedList)) {
                    try (Resource r1 = createResource(1, bitMap, closedList)) {
                        try (Resource r2 = createResource(2, bitMap, closedList)) {
                            try (Resource r3 = createResource(3, bitMap, closedList)) {
                                try (Resource r4 = createResource(4, bitMap, closedList)) {
                                    if (failure != 0)
                                        throw new MyKindOfException();
                                }
                            }
                        }
                    }
                } catch (Resource.CreateFailException e) {
                    throw new AssertionError("Resource creation failed: " + e.resourceId());
                } catch (MyKindOfException e) {
                    checkMyKindOfException(e, bitMap, failure);
                } catch (Resource.CloseFailException e) {
                    checkCloseFailException(e, bitMap, failure);
                }
                checkClosedList(closedList, 5);
            }
        }
    }

    /**
     * Validate a MyKindOfException caught from a testCreateSuccess* body.
     * It is expected only on the failure iteration, and must carry a
     * suppressed CloseFailException for every bit set in bitMap.
     *
     * @param e the caught exception
     * @param bitMap a bit vector describing which resources fail to close
     * @param failure 0 if forward processing succeeded, 1 if it threw
     */
    private static void checkMyKindOfException(MyKindOfException e, int bitMap, int failure) {
        if (failure == 0)
            throw new AssertionError("Unexpected MyKindOfException");
        checkSuppressedExceptions(e.getSuppressed(), bitMap);
    }

    /**
     * Validate a CloseFailException that propagated out of a testCreateSuccess*
     * block. It may only escape when forward processing succeeded, must come
     * from the highest-numbered failing resource (the first one closed), and
     * must carry the remaining close failures as suppressed exceptions.
     *
     * @param e the caught exception
     * @param bitMap a bit vector describing which resources fail to close
     * @param failure 0 if forward processing succeeded, 1 if it threw
     */
    private static void checkCloseFailException(Resource.CloseFailException e,
                                                int bitMap, int failure) {
        if (failure == 1)
            throw new AssertionError("Secondary exception suppression failed");
        int id = e.resourceId();
        if (bitMap == 0)
            throw new AssertionError("Unexpected CloseFailException: " + id);
        int highestCloseFailBit = Integer.highestOneBit(bitMap);
        if (1 << id != highestCloseFailBit) {
            throw new AssertionError("CloseFailException: got id " + id
                                     + ", expected lg(" + highestCloseFailBit + ")");
        }
        checkSuppressedExceptions(e.getSuppressed(), bitMap & ~highestCloseFailBit);
    }

    /**
     * Create a resource that always succeeds at creation.
     *
     * @param id the ID of this resource
     * @param closeFailureBitMap a bit vector describing which resources should throw an
     *        exception when close is attempted
     * @param closedList a list on which to record resource close attempts
     */
    private static Resource createResource(int id,
                                           int closeFailureBitMap,
                                           List<Integer> closedList)
            throws Resource.CreateFailException {
        boolean closeSucceeds = (closeFailureBitMap & (1 << id)) == 0;
        return new Resource(id, true, closeSucceeds, closedList);
    }

    /** Marker exception thrown to simulate failure of forward processing. */
    private static class MyKindOfException extends Exception {
    }
}

/**
 * A fake resource whose creation and close behavior is scripted: it can be
 * made to fail at construction or at close, and records every close attempt.
 */
class Resource implements AutoCloseable {
    /** A number identifying this resource */
    private final int resourceId;

    /** Whether the close call on this resource should succeed or fail */
    private final boolean closeSucceeds;

    /** When resource is closed, it records its ID in this list */
    private final List<Integer> closedList;

    Resource(int resourceId, boolean createSucceeds, boolean closeSucceeds,
             List<Integer> closedList) throws CreateFailException {
        if (!createSucceeds)
            throw new CreateFailException(resourceId);
        this.resourceId = resourceId;
        this.closeSucceeds = closeSucceeds;
        this.closedList = closedList;
    }

    public void close() throws CloseFailException {
        // Record the attempt before (possibly) failing, so callers can verify
        // that a close was attempted even when it throws.
        closedList.add(resourceId);
        if (!closeSucceeds)
            throw new CloseFailException(resourceId);
    }

    public static class ResourceException extends RuntimeException {
        private final int resourceId;

        public ResourceException(int resourceId) {
            super("Resource ID = " + resourceId);
            this.resourceId = resourceId;
        }

        public int resourceId() {
            return resourceId;
        }
    }

    public static class CreateFailException extends ResourceException {
        public CreateFailException(int resourceId) {
            super(resourceId);
        }
    }

    public static class CloseFailException extends ResourceException {
        public CloseFailException(int resourceId) {
            super(resourceId);
        }
    }
}
apache/lucene
38,005
lucene/spatial-test-fixtures/src/java/org/apache/lucene/spatial3d/tests/RandomGeo3dShapeGenerator.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.spatial3d.tests; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import org.apache.lucene.spatial3d.geom.GeoArea; import org.apache.lucene.spatial3d.geom.GeoAreaShape; import org.apache.lucene.spatial3d.geom.GeoBBox; import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; import org.apache.lucene.spatial3d.geom.GeoCircle; import org.apache.lucene.spatial3d.geom.GeoCircleFactory; import org.apache.lucene.spatial3d.geom.GeoCompositeAreaShape; import org.apache.lucene.spatial3d.geom.GeoCompositeMembershipShape; import org.apache.lucene.spatial3d.geom.GeoPath; import org.apache.lucene.spatial3d.geom.GeoPathFactory; import org.apache.lucene.spatial3d.geom.GeoPoint; import org.apache.lucene.spatial3d.geom.GeoPointShape; import org.apache.lucene.spatial3d.geom.GeoPointShapeFactory; import org.apache.lucene.spatial3d.geom.GeoPolygon; import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; import org.apache.lucene.spatial3d.geom.GeoShape; import 
org.apache.lucene.spatial3d.geom.PlanetModel; /** * Class for generating random Geo3dShapes. They can be generated under given constraints which are * expressed as a shape and a relationship. * * <p>note that convexity for polygons is defined as polygons that contains antipodal points, * otherwise they are convex. Internally they can be created using GeoConvexPolygons and * GeoConcavePolygons. */ public final class RandomGeo3dShapeGenerator { /* Max num of iterations to find right shape under given constrains */ private static final int MAX_SHAPE_ITERATIONS = 20; /* Max num of iterations to find right point under given constrains */ private static final int MAX_POINT_ITERATIONS = 1000; /* Supported shapes */ public static final int CONVEX_POLYGON = 0; public static final int CONVEX_POLYGON_WITH_HOLES = 1; public static final int CONCAVE_POLYGON = 2; public static final int CONCAVE_POLYGON_WITH_HOLES = 3; public static final int COMPLEX_POLYGON = 4; public static final int CIRCLE = 5; public static final int RECTANGLE = 6; public static final int PATH = 7; public static final int COLLECTION = 8; public static final int POINT = 9; public static final int LINE = 10; public static final int EXACT_CIRCLE = 11; /* Helper shapes for generating constraints whch are just three sided polygons */ public static final int CONVEX_SIMPLE_POLYGON = 500; public static final int CONCAVE_SIMPLE_POLYGON = 501; /** Static methods only. */ private RandomGeo3dShapeGenerator() {} /** * @return Returns a private-use random forked from the current {@link RandomizedContext}. */ private static Random random() { return new Random(RandomizedContext.current().getRandom().nextLong()); } /** * Method that returns a random generated Planet model from the supported Planet models. 
currently * SPHERE and WGS84 * * @return a random generated Planet model */ public static PlanetModel randomPlanetModel() { final int shapeType = random().nextInt(2); switch (shapeType) { case 0: { return PlanetModel.SPHERE; } case 1: { return PlanetModel.WGS84; } default: throw new IllegalStateException("Unexpected planet model"); } } /** * Method that returns a random generated a random Shape code from all supported shapes. * * @return a random generated shape code */ public static int randomShapeType() { return random().nextInt(12); } /** * Method that returns a random generated GeoAreaShape code from all supported GeoAreaShapes. * * <p>We are removing Collections because it is difficult to create shapes with properties in some * cases. * * @return a random generated polygon code */ public static int randomGeoAreaShapeType() { return random().nextInt(12); } /** * Method that returns a random generated a random Shape code from all convex supported shapes. * * @return a random generated convex shape code */ public static int randomConvexShapeType() { int shapeType = randomShapeType(); while (isConcave(shapeType)) { shapeType = randomShapeType(); } return shapeType; } /** * Method that returns a random generated a random Shape code from all concave supported shapes. * * @return a random generated concave shape code */ public static int randomConcaveShapeType() { int shapeType = randomShapeType(); while (!isConcave(shapeType)) { shapeType = randomShapeType(); } return shapeType; } /** * Check if a shape code represents a concave shape * * @return true if the shape represented by the code is concave */ public static boolean isConcave(int shapeType) { return (shapeType == CONCAVE_POLYGON); } /** * Method that returns empty Constraints object.. * * @return an empty Constraints object */ public static Constraints getEmptyConstraint() { return new Constraints(); } /** * Method that returns a random generated GeoPoint. * * @param planetModel The planet model. 
* @return The random generated GeoPoint. */ public static GeoPoint randomGeoPoint(PlanetModel planetModel) { GeoPoint point = null; while (point == null) { point = randomGeoPoint(planetModel, getEmptyConstraint()); } return point; } /** * Method that returns a random generated GeoPoint under given constraints. Returns NULL if it * cannot find a point under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPoint. */ public static GeoPoint randomGeoPoint(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_POINT_ITERATIONS) { double lat = RandomizedTest.randomDouble() * Math.PI / 2; if (random().nextBoolean()) { lat = (-1) * lat; } double lon = RandomizedTest.randomDouble() * Math.PI; if (random().nextBoolean()) { lon = (-1) * lon; } iterations++; GeoPoint point = new GeoPoint(planetModel, lat, lon); if (constraints.isWithin(point)) { return point; } } return null; } /** * Method that returns a random generated GeoAreaShape. * * @param shapeType The GeoAreaShape code. * @param planetModel The planet model. * @return The random generated GeoAreaShape. */ public static GeoAreaShape randomGeoAreaShape(int shapeType, PlanetModel planetModel) { GeoAreaShape geoAreaShape = null; while (geoAreaShape == null) { geoAreaShape = randomGeoAreaShape(shapeType, planetModel, new Constraints()); } return geoAreaShape; } /** * Method that returns a random generated GeoAreaShape under given constraints. Returns NULL if it * cannot build the GeoAreaShape under the given constraints. * * @param shapeType The GeoAreaShape code. * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoAreaShape. 
*/ public static GeoAreaShape randomGeoAreaShape( int shapeType, PlanetModel planetModel, Constraints constraints) { return (GeoAreaShape) randomGeoShape(shapeType, planetModel, constraints); } /** * Method that returns a random generated GeoShape. * * @param shapeType The shape code. * @param planetModel The planet model. * @return The random generated GeoShape. */ public static GeoShape randomGeoShape(int shapeType, PlanetModel planetModel) { GeoShape geoShape = null; while (geoShape == null) { geoShape = randomGeoShape(shapeType, planetModel, new Constraints()); } return geoShape; } /** * Method that returns a random generated GeoShape under given constraints. Returns NULL if it * cannot build the GeoShape under the given constraints. * * @param shapeType The polygon code. * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoShape. */ public static GeoShape randomGeoShape( int shapeType, PlanetModel planetModel, Constraints constraints) { switch (shapeType) { case CONVEX_POLYGON: { return convexPolygon(planetModel, constraints); } case CONVEX_POLYGON_WITH_HOLES: { return convexPolygonWithHoles(planetModel, constraints); } case CONCAVE_POLYGON: { return concavePolygon(planetModel, constraints); } case CONCAVE_POLYGON_WITH_HOLES: { return concavePolygonWithHoles(planetModel, constraints); } case COMPLEX_POLYGON: { return complexPolygon(planetModel, constraints); } case CIRCLE: { return circle(planetModel, constraints); } case RECTANGLE: { return rectangle(planetModel, constraints); } case PATH: { return path(planetModel, constraints); } case COLLECTION: { return collection(planetModel, constraints); } case POINT: { return point(planetModel, constraints); } case LINE: { return line(planetModel, constraints); } case CONVEX_SIMPLE_POLYGON: { return simpleConvexPolygon(planetModel, constraints); } case CONCAVE_SIMPLE_POLYGON: { return concaveSimplePolygon(planetModel, constraints); } case EXACT_CIRCLE: { 
return exactCircle(planetModel, constraints); } default: throw new IllegalStateException("Unexpected shape type"); } } /** * Method that returns a random generated a GeoPointShape under given constraints. Returns NULL if * it cannot build the GeoCircle under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPointShape. */ private static GeoPointShape point(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; final GeoPoint point = randomGeoPoint(planetModel, constraints); if (point == null) { continue; } try { GeoPointShape pointShape = GeoPointShapeFactory.makeGeoPointShape( planetModel, point.getLatitude(), point.getLongitude()); if (!constraints.valid(pointShape)) { continue; } return pointShape; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a GeoCircle under given constraints. Returns NULL if it * cannot build the GeoCircle under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoCircle. */ private static GeoCircle circle(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; final GeoPoint center = randomGeoPoint(planetModel, constraints); if (center == null) { continue; } final double radius = randomCutoffAngle(); try { GeoCircle circle = GeoCircleFactory.makeGeoCircle( planetModel, center.getLatitude(), center.getLongitude(), radius); if (!constraints.valid(circle)) { continue; } return circle; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a GeoCircle under given constraints. Returns NULL if it * cannot build the GeoCircle under the given constraints. * * @param planetModel The planet model. 
* @param constraints The given constraints. * @return The random generated GeoCircle. */ private static GeoCircle exactCircle(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; final GeoPoint center = randomGeoPoint(planetModel, constraints); if (center == null) { continue; } final double radius = randomCutoffAngle(); final int pow = random().nextInt(10) + 3; final double accuracy = random().nextDouble() * Math.pow(10, (-1) * pow); try { GeoCircle circle = GeoCircleFactory.makeExactGeoCircle( planetModel, center.getLatitude(), center.getLongitude(), radius, accuracy); if (!constraints.valid(circle)) { continue; } return circle; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a GeoBBox under given constraints. Returns NULL if it * cannot build the GeoBBox under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoBBox. 
*/ private static GeoBBox rectangle(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; final GeoPoint point1 = randomGeoPoint(planetModel, constraints); if (point1 == null) { continue; } final GeoPoint point2 = randomGeoPoint(planetModel, constraints); if (point2 == null) { continue; } double minLat = Math.min(point1.getLatitude(), point2.getLatitude()); double maxLat = Math.max(point1.getLatitude(), point2.getLatitude()); double minLon = Math.min(point1.getLongitude(), point2.getLongitude()); double maxLon = Math.max(point1.getLongitude(), point2.getLongitude()); try { GeoBBox bbox = GeoBBoxFactory.makeGeoBBox(planetModel, maxLat, minLat, minLon, maxLon); if (!constraints.valid(bbox)) { continue; } return bbox; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated degenerate GeoPath under given constraints. Returns NULL * if it cannot build the degenerate GeoPath under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated degenerated GeoPath. */ private static GeoPath line(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; int vertexCount = random().nextInt(2) + 2; List<GeoPoint> geoPoints = points(vertexCount, planetModel, constraints); if (geoPoints.size() < 2) { continue; } try { GeoPath path = GeoPathFactory.makeGeoPath( planetModel, 0, geoPoints.toArray(new GeoPoint[geoPoints.size()])); if (!constraints.valid(path)) { continue; } return path; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a GeoPath under given constraints. Returns NULL if it * cannot build the GeoPath under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. 
* @return The random generated GeoPath. */ private static GeoPath path(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; int vertexCount = random().nextInt(2) + 2; List<GeoPoint> geoPoints = points(vertexCount, planetModel, constraints); if (geoPoints.size() < 2) { continue; } double width = randomCutoffAngle(); try { GeoPath path = GeoPathFactory.makeGeoPath( planetModel, width, geoPoints.toArray(new GeoPoint[geoPoints.size()])); if (!constraints.valid(path)) { continue; } return path; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a GeoCompositeMembershipShape under given constraints. * Returns NULL if it cannot build the GGeoCompositeMembershipShape under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoCompositeMembershipShape. */ private static GeoCompositeAreaShape collection( PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; int numberShapes = random().nextInt(3) + 2; GeoCompositeAreaShape collection = new GeoCompositeAreaShape(planetModel); for (int i = 0; i < numberShapes; i++) { GeoPolygon member = convexPolygon(planetModel, constraints); if (member != null) { collection.addShape(member); } } if (collection.size() == 0) { continue; } return collection; } return null; } /** * Method that returns a random generated a convex GeoPolygon under given constraints. Returns * NULL if it cannot build the GePolygon under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. 
*/ private static GeoPolygon convexPolygon(PlanetModel planetModel, Constraints constraints) { int vertexCount = random().nextInt(4) + 3; int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; List<GeoPoint> geoPoints = points(vertexCount, planetModel, constraints); if (geoPoints.size() < 3) { continue; } List<GeoPoint> orderedGeoPoints = orderPoints(geoPoints); try { GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints); if (!constraints.valid(polygon) || isConcave(planetModel, polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a convex GeoPolygon with holes under given constraints. * Returns NULL if it cannot build the GeoPolygon with holes under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. */ private static GeoPolygon convexPolygonWithHoles( PlanetModel planetModel, Constraints constraints) { int vertexCount = random().nextInt(4) + 3; int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; List<GeoPoint> geoPoints = points(vertexCount, planetModel, constraints); if (geoPoints.size() < 3) { continue; } List<GeoPoint> orderedGeoPoints = orderPoints(geoPoints); try { GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints); // polygon should comply with all constraints except disjoint as we have holes Constraints polygonConstraints = new Constraints(); polygonConstraints.putAll(constraints.getContains()); polygonConstraints.putAll(constraints.getWithin()); polygonConstraints.putAll(constraints.getDisjoint()); if (!polygonConstraints.valid(polygon) || isConcave(planetModel, polygon)) { continue; } // hole must overlap with polygon and comply with any CONTAINS constraint. 
Constraints holeConstraints = new Constraints(); holeConstraints.putAll(constraints.getContains()); holeConstraints.put(polygon, GeoArea.OVERLAPS); // Points must be with in the polygon and must comply // CONTAINS and DISJOINT constraints Constraints pointsConstraints = new Constraints(); pointsConstraints.put(polygon, GeoArea.WITHIN); pointsConstraints.putAll(constraints.getContains()); pointsConstraints.putAll(constraints.getDisjoint()); List<GeoPolygon> holes = concavePolygonHoles(planetModel, polygon, holeConstraints, pointsConstraints); // we should have at least one hole if (holes.size() == 0) { continue; } polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints, holes); if (!constraints.valid(polygon) || isConcave(planetModel, polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random list if concave GeoPolygons under given constraints. Method use to * generate convex holes. Note that constraints for points and holes are different, * * @param planetModel The planet model. * @param polygon The polygon where the holes are within. * @param holeConstraints The given constraints that a hole must comply. * @param pointConstraints The given constraints that a point must comply. * @return The random generated GeoPolygon. 
*/ private static List<GeoPolygon> concavePolygonHoles( PlanetModel planetModel, GeoPolygon polygon, Constraints holeConstraints, Constraints pointConstraints) { int iterations = 0; int holesCount = random().nextInt(3) + 1; List<GeoPolygon> holes = new ArrayList<>(); while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; int vertexCount = random().nextInt(3) + 3; List<GeoPoint> geoPoints = points(vertexCount, planetModel, pointConstraints); if (geoPoints.size() < 3) { continue; } geoPoints = orderPoints(geoPoints); GeoPolygon inversePolygon = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints); // The convex polygon must be within if (inversePolygon == null || polygon.getRelationship(inversePolygon) != GeoArea.WITHIN) { continue; } // make it concave Collections.reverse(geoPoints); try { GeoPolygon hole = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints); if (!holeConstraints.valid(hole) || isConvex(planetModel, hole)) { continue; } holes.add(hole); if (holes.size() == holesCount) { return holes; } pointConstraints.put(hole, GeoArea.DISJOINT); } catch (IllegalArgumentException _) { continue; } } return holes; } /** * Method that returns a random generated a concave GeoPolygon under given constraints. Returns * NULL if it cannot build the concave GeoPolygon under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. 
*/ private static GeoPolygon concavePolygon(PlanetModel planetModel, Constraints constraints) { int vertexCount = random().nextInt(4) + 3; int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; List<GeoPoint> geoPoints = points(vertexCount, planetModel, constraints); if (geoPoints.size() < 3) { continue; } List<GeoPoint> orderedGeoPoints = orderPoints(geoPoints); Collections.reverse(orderedGeoPoints); try { GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints); if (!constraints.valid(polygon) || isConvex(planetModel, polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a concave GeoPolygon with holes under given constraints. * Returns NULL if it cannot build the GeoPolygon under the given constraints. Note that the final * GeoPolygon is convex as the hole wraps the convex GeoPolygon. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. */ private static GeoPolygon concavePolygonWithHoles( PlanetModel planetModel, Constraints constraints) { int vertexCount = random().nextInt(4) + 3; int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; // we first build the hole. We consider all constraints except // disjoint as we have a hole Constraints holeConstraints = new Constraints(); holeConstraints.putAll(constraints.getContains()); holeConstraints.putAll(constraints.getWithin()); holeConstraints.putAll(constraints.getOverlaps()); GeoPolygon hole = convexPolygon(planetModel, holeConstraints); if (hole == null) { continue; } // Now we get points for polygon. 
Must we within the hole // and we add contain constraints Constraints pointConstraints = new Constraints(); pointConstraints.put(hole, GeoArea.WITHIN); pointConstraints.putAll(constraints.getContains()); List<GeoPoint> geoPoints = points(vertexCount, planetModel, pointConstraints); if (geoPoints.size() < 3) { continue; } try { List<GeoPoint> orderedGeoPoints = orderPoints(geoPoints); GeoPolygon inversePolygon = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints); // The convex polygon must be within the hole if (inversePolygon == null || hole.getRelationship(inversePolygon) != GeoArea.WITHIN) { continue; } Collections.reverse(orderedGeoPoints); GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon( planetModel, orderedGeoPoints, Collections.singletonList(hole)); // final polygon must be convex if (!constraints.valid(polygon) || isConcave(planetModel, polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated complex GeoPolygon under given constraints. Returns NULL * if it cannot build the complex GeoPolygon under the given constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. 
*/ private static GeoPolygon complexPolygon(PlanetModel planetModel, Constraints constraints) { int polygonsCount = random().nextInt(2) + 1; int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; List<GeoPolygonFactory.PolygonDescription> polDescription = new ArrayList<>(); while (polDescription.size() < polygonsCount) { int vertexCount = random().nextInt(14) + 3; List<GeoPoint> geoPoints = points(vertexCount, planetModel, constraints); if (geoPoints.size() < 3) { break; } orderPoints(geoPoints); polDescription.add(new GeoPolygonFactory.PolygonDescription(geoPoints)); } try { GeoPolygon polygon = GeoPolygonFactory.makeLargeGeoPolygon(planetModel, polDescription); if (!constraints.valid(polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a concave square GeoPolygon under given constraints. * Returns NULL if it cannot build the concave GeoPolygon under the given constraints. This shape * is an utility to build constraints. * * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. */ private static GeoPolygon simpleConvexPolygon(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; List<GeoPoint> points = points(3, planetModel, constraints); if (points.size() < 3) { continue; } points = orderPoints(points); try { GeoPolygon polygon = GeoPolygonFactory.makeGeoConvexPolygon(planetModel, points); if (!constraints.valid(polygon) || isConcave(planetModel, polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random generated a convex square GeoPolygon under given constraints. * Returns NULL if it cannot build the convex GeoPolygon under the given constraints. This shape * is an utility to build constraints. 
* * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated GeoPolygon. */ private static GeoPolygon concaveSimplePolygon(PlanetModel planetModel, Constraints constraints) { int iterations = 0; while (iterations < MAX_SHAPE_ITERATIONS) { iterations++; List<GeoPoint> points = points(3, planetModel, constraints); if (points.size() < 3) { continue; } points = orderPoints(points); Collections.reverse(points); try { GeoPolygon polygon = GeoPolygonFactory.makeGeoConcavePolygon(planetModel, points); if (!constraints.valid(polygon) || isConvex(planetModel, polygon)) { continue; } return polygon; } catch (IllegalArgumentException _) { continue; } } return null; } /** * Method that returns a random list of generated GeoPoints under given constraints. The number of * points returned might be lower than the requested. * * @param count The number of points * @param planetModel The planet model. * @param constraints The given constraints. * @return The random generated List of GeoPoints. */ private static List<GeoPoint> points( int count, PlanetModel planetModel, Constraints constraints) { List<GeoPoint> geoPoints = new ArrayList<>(count); for (int i = 0; i < count; i++) { GeoPoint point = randomGeoPoint(planetModel, constraints); if (point != null) { geoPoints.add(point); } } return geoPoints; } /** * Check if a GeoPolygon is pure concave. Note that our definition for concavity is that the * polygon contains antipodal points. * * @param planetModel The planet model. * @param shape The polygon to check. * @return True if the polygon contains antipodal points. 
*/ private static boolean isConcave(PlanetModel planetModel, GeoPolygon shape) { return (shape.isWithin(planetModel.NORTH_POLE) && shape.isWithin(planetModel.SOUTH_POLE)) || (shape.isWithin(planetModel.MAX_X_POLE) && shape.isWithin(planetModel.MIN_X_POLE)) || (shape.isWithin(planetModel.MAX_Y_POLE) && shape.isWithin(planetModel.MIN_Y_POLE)); } /** * Check if a GeoPolygon is pure convex. Note that our definition for convexity is that the * polygon does not contain antipodal points. * * @param planetModel The planet model. * @param shape The polygon to check. * @return True if the polygon dies not contains antipodal points. */ private static boolean isConvex(PlanetModel planetModel, GeoPolygon shape) { return !isConcave(planetModel, shape); } /** * Generates a random number between 0 and PI. * * @return the cutoff angle. */ private static double randomCutoffAngle() { return RandomizedTest.randomDouble() * Math.PI; } /** * Method that orders a lit of points anti-clock-wise to prevent crossing edges. * * @param points The points to order. * @return The list of ordered points anti-clockwise. 
*/ public static List<GeoPoint> orderPoints(List<GeoPoint> points) { double x = 0; double y = 0; double z = 0; // get center of mass for (GeoPoint point : points) { x += point.x; y += point.y; z += point.z; } Map<Double, GeoPoint> pointWithAngle = new HashMap<>(); // get angle respect center of mass for (GeoPoint point : points) { GeoPoint center = new GeoPoint(x / points.size(), y / points.size(), z / points.size()); double cs = Math.sin(center.getLatitude()) * Math.sin(point.getLatitude()) + Math.cos(center.getLatitude()) * Math.cos(point.getLatitude()) * Math.cos(point.getLongitude() - center.getLongitude()); double posAng = Math.atan2( Math.cos(center.getLatitude()) * Math.cos(point.getLatitude()) * Math.sin(point.getLongitude() - center.getLongitude()), Math.sin(point.getLatitude()) - Math.sin(center.getLatitude()) * cs); pointWithAngle.put(posAng, point); } // order points List<Double> angles = new ArrayList<>(pointWithAngle.keySet()); Collections.sort(angles); Collections.reverse(angles); List<GeoPoint> orderedPoints = new ArrayList<>(); for (Double d : angles) { orderedPoints.add(pointWithAngle.get(d)); } return orderedPoints; } /** * Class that holds the constraints that are given to build shapes. It consists in a list of * GeoAreaShapes and relationships the new shape needs to satisfy. */ public static class Constraints extends HashMap<GeoAreaShape, Integer> { /** * Check if the shape is valid under the constraints. * * @param shape The shape to check * @return true if the shape satisfy the constraints, else false. */ public boolean valid(GeoShape shape) { if (shape == null) { return false; } for (GeoAreaShape constraint : keySet()) { if (constraint.getRelationship(shape) != get(constraint)) { return false; } } return true; } /** * Check if a point is Within the constraints. * * @param point The point to check * @return true if the point satisfy the constraints, else false. 
*/ public boolean isWithin(GeoPoint point) { for (GeoShape constraint : keySet()) { if (!(validPoint(point, constraint, get(constraint)))) { return false; } } return true; } /** * Check if a point is Within one constraint given by a shape and a relationship. * * @param point The point to check * @param shape The shape of the constraint * @param relationship The relationship of the constraint. * @return true if the point satisfy the constraint, else false. */ private boolean validPoint(GeoPoint point, GeoShape shape, int relationship) { // For GeoCompositeMembershipShape we only consider the first shape to help // converging if (relationship == GeoArea.WITHIN && shape instanceof GeoCompositeMembershipShape) { shape = (((GeoCompositeMembershipShape) shape).getShapes().get(0)); } switch (relationship) { case GeoArea.DISJOINT: return !shape.isWithin(point); case GeoArea.OVERLAPS: return true; case GeoArea.CONTAINS: return !shape.isWithin(point); case GeoArea.WITHIN: return shape.isWithin(point); default: return true; } } /** * Collect the CONTAINS constraints in the object * * @return the CONTAINS constraints. */ public Constraints getContains() { return getConstraintsOfType(GeoArea.CONTAINS); } /** * Collect the WITHIN constraints in the object * * @return the WITHIN constraints. */ public Constraints getWithin() { return getConstraintsOfType(GeoArea.WITHIN); } /** * Collect the OVERLAPS constraints in the object * * @return the OVERLAPS constraints. */ public Constraints getOverlaps() { return getConstraintsOfType(GeoArea.OVERLAPS); } /** * Collect the DISJOINT constraints in the object * * @return the DISJOINT constraints. */ public Constraints getDisjoint() { return getConstraintsOfType(GeoArea.DISJOINT); } private Constraints getConstraintsOfType(int type) { Constraints constraints = new Constraints(); for (GeoAreaShape constraint : keySet()) { if (type == get(constraint)) { constraints.put(constraint, type); } } return constraints; } } }
googleapis/google-cloud-java
37,854
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/DisksScopedList.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * </pre> * * Protobuf type {@code google.cloud.compute.v1.DisksScopedList} */ public final class DisksScopedList extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.DisksScopedList) DisksScopedListOrBuilder { private static final long serialVersionUID = 0L; // Use DisksScopedList.newBuilder() to construct. 
private DisksScopedList(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DisksScopedList() { disks_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DisksScopedList(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DisksScopedList_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DisksScopedList_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.DisksScopedList.class, com.google.cloud.compute.v1.DisksScopedList.Builder.class); } private int bitField0_; public static final int DISKS_FIELD_NUMBER = 95594102; @SuppressWarnings("serial") private java.util.List<com.google.cloud.compute.v1.Disk> disks_; /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ @java.lang.Override public java.util.List<com.google.cloud.compute.v1.Disk> getDisksList() { return disks_; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.compute.v1.DiskOrBuilder> getDisksOrBuilderList() { return disks_; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ @java.lang.Override public int getDisksCount() { return disks_.size(); } /** * * * <pre> * [Output Only] A list of disks contained in this scope. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ @java.lang.Override public com.google.cloud.compute.v1.Disk getDisks(int index) { return disks_.get(index); } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ @java.lang.Override public com.google.cloud.compute.v1.DiskOrBuilder getDisksOrBuilder(int index) { return disks_.get(index); } public static final int WARNING_FIELD_NUMBER = 50704284; private com.google.cloud.compute.v1.Warning warning_; /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> * * @return Whether the warning field is set. */ @java.lang.Override public boolean hasWarning() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> * * @return The warning. */ @java.lang.Override public com.google.cloud.compute.v1.Warning getWarning() { return warning_ == null ? com.google.cloud.compute.v1.Warning.getDefaultInstance() : warning_; } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ @java.lang.Override public com.google.cloud.compute.v1.WarningOrBuilder getWarningOrBuilder() { return warning_ == null ? 
com.google.cloud.compute.v1.Warning.getDefaultInstance() : warning_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(50704284, getWarning()); } for (int i = 0; i < disks_.size(); i++) { output.writeMessage(95594102, disks_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(50704284, getWarning()); } for (int i = 0; i < disks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(95594102, disks_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.DisksScopedList)) { return super.equals(obj); } com.google.cloud.compute.v1.DisksScopedList other = (com.google.cloud.compute.v1.DisksScopedList) obj; if (!getDisksList().equals(other.getDisksList())) return false; if (hasWarning() != other.hasWarning()) return false; if (hasWarning()) { if (!getWarning().equals(other.getWarning())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getDisksCount() > 0) { hash = (37 * hash) + DISKS_FIELD_NUMBER; hash = (53 * hash) + 
getDisksList().hashCode(); } if (hasWarning()) { hash = (37 * hash) + WARNING_FIELD_NUMBER; hash = (53 * hash) + getWarning().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.DisksScopedList parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.DisksScopedList parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.DisksScopedList parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.DisksScopedList parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.compute.v1.DisksScopedList prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * </pre> * * Protobuf type {@code google.cloud.compute.v1.DisksScopedList} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.DisksScopedList) com.google.cloud.compute.v1.DisksScopedListOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DisksScopedList_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DisksScopedList_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.DisksScopedList.class, com.google.cloud.compute.v1.DisksScopedList.Builder.class); } // Construct using com.google.cloud.compute.v1.DisksScopedList.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDisksFieldBuilder(); getWarningFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (disksBuilder_ == null) { disks_ = java.util.Collections.emptyList(); } else { disks_ = null; disksBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); warning_ = null; if (warningBuilder_ != null) { warningBuilder_.dispose(); warningBuilder_ = null; } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_DisksScopedList_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.DisksScopedList getDefaultInstanceForType() { return com.google.cloud.compute.v1.DisksScopedList.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.DisksScopedList build() { com.google.cloud.compute.v1.DisksScopedList result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.DisksScopedList buildPartial() { com.google.cloud.compute.v1.DisksScopedList result = new com.google.cloud.compute.v1.DisksScopedList(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.compute.v1.DisksScopedList result) { if (disksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { disks_ = java.util.Collections.unmodifiableList(disks_); bitField0_ = (bitField0_ & ~0x00000001); } result.disks_ = disks_; } else { result.disks_ = disksBuilder_.build(); } } private void buildPartial0(com.google.cloud.compute.v1.DisksScopedList result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.warning_ = warningBuilder_ == null ? 
warning_ : warningBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.DisksScopedList) { return mergeFrom((com.google.cloud.compute.v1.DisksScopedList) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.DisksScopedList other) { if (other == com.google.cloud.compute.v1.DisksScopedList.getDefaultInstance()) return this; if (disksBuilder_ == null) { if (!other.disks_.isEmpty()) { if (disks_.isEmpty()) { disks_ = other.disks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDisksIsMutable(); disks_.addAll(other.disks_); } onChanged(); } } else { if (!other.disks_.isEmpty()) { if (disksBuilder_.isEmpty()) { disksBuilder_.dispose(); disksBuilder_ = null; disks_ = other.disks_; bitField0_ = (bitField0_ & ~0x00000001); disksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getDisksFieldBuilder() : null; } else { disksBuilder_.addAllMessages(other.disks_); } } } if (other.hasWarning()) { mergeWarning(other.getWarning()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 405634274: { input.readMessage(getWarningFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 405634274 case 764752818: { com.google.cloud.compute.v1.Disk m = input.readMessage(com.google.cloud.compute.v1.Disk.parser(), extensionRegistry); if (disksBuilder_ == null) { ensureDisksIsMutable(); disks_.add(m); } else { disksBuilder_.addMessage(m); } break; } // case 764752818 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.compute.v1.Disk> disks_ = java.util.Collections.emptyList(); private void ensureDisksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { disks_ = new java.util.ArrayList<com.google.cloud.compute.v1.Disk>(disks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.Disk, com.google.cloud.compute.v1.Disk.Builder, com.google.cloud.compute.v1.DiskOrBuilder> disksBuilder_; /** * * * <pre> * [Output Only] A list of disks contained in this scope. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public java.util.List<com.google.cloud.compute.v1.Disk> getDisksList() { if (disksBuilder_ == null) { return java.util.Collections.unmodifiableList(disks_); } else { return disksBuilder_.getMessageList(); } } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public int getDisksCount() { if (disksBuilder_ == null) { return disks_.size(); } else { return disksBuilder_.getCount(); } } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public com.google.cloud.compute.v1.Disk getDisks(int index) { if (disksBuilder_ == null) { return disks_.get(index); } else { return disksBuilder_.getMessage(index); } } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder setDisks(int index, com.google.cloud.compute.v1.Disk value) { if (disksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDisksIsMutable(); disks_.set(index, value); onChanged(); } else { disksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder setDisks(int index, com.google.cloud.compute.v1.Disk.Builder builderForValue) { if (disksBuilder_ == null) { ensureDisksIsMutable(); disks_.set(index, builderForValue.build()); onChanged(); } else { disksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder addDisks(com.google.cloud.compute.v1.Disk value) { if (disksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDisksIsMutable(); disks_.add(value); onChanged(); } else { disksBuilder_.addMessage(value); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder addDisks(int index, com.google.cloud.compute.v1.Disk value) { if (disksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDisksIsMutable(); disks_.add(index, value); onChanged(); } else { disksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder addDisks(com.google.cloud.compute.v1.Disk.Builder builderForValue) { if (disksBuilder_ == null) { ensureDisksIsMutable(); disks_.add(builderForValue.build()); onChanged(); } else { disksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder addDisks(int index, com.google.cloud.compute.v1.Disk.Builder builderForValue) { if (disksBuilder_ == null) { ensureDisksIsMutable(); disks_.add(index, builderForValue.build()); onChanged(); } else { disksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder addAllDisks( java.lang.Iterable<? 
extends com.google.cloud.compute.v1.Disk> values) { if (disksBuilder_ == null) { ensureDisksIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, disks_); onChanged(); } else { disksBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder clearDisks() { if (disksBuilder_ == null) { disks_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { disksBuilder_.clear(); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public Builder removeDisks(int index) { if (disksBuilder_ == null) { ensureDisksIsMutable(); disks_.remove(index); onChanged(); } else { disksBuilder_.remove(index); } return this; } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public com.google.cloud.compute.v1.Disk.Builder getDisksBuilder(int index) { return getDisksFieldBuilder().getBuilder(index); } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public com.google.cloud.compute.v1.DiskOrBuilder getDisksOrBuilder(int index) { if (disksBuilder_ == null) { return disks_.get(index); } else { return disksBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public java.util.List<? 
extends com.google.cloud.compute.v1.DiskOrBuilder> getDisksOrBuilderList() { if (disksBuilder_ != null) { return disksBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(disks_); } } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public com.google.cloud.compute.v1.Disk.Builder addDisksBuilder() { return getDisksFieldBuilder() .addBuilder(com.google.cloud.compute.v1.Disk.getDefaultInstance()); } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public com.google.cloud.compute.v1.Disk.Builder addDisksBuilder(int index) { return getDisksFieldBuilder() .addBuilder(index, com.google.cloud.compute.v1.Disk.getDefaultInstance()); } /** * * * <pre> * [Output Only] A list of disks contained in this scope. * </pre> * * <code>repeated .google.cloud.compute.v1.Disk disks = 95594102;</code> */ public java.util.List<com.google.cloud.compute.v1.Disk.Builder> getDisksBuilderList() { return getDisksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.Disk, com.google.cloud.compute.v1.Disk.Builder, com.google.cloud.compute.v1.DiskOrBuilder> getDisksFieldBuilder() { if (disksBuilder_ == null) { disksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.Disk, com.google.cloud.compute.v1.Disk.Builder, com.google.cloud.compute.v1.DiskOrBuilder>( disks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); disks_ = null; } return disksBuilder_; } private com.google.cloud.compute.v1.Warning warning_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Warning, com.google.cloud.compute.v1.Warning.Builder, com.google.cloud.compute.v1.WarningOrBuilder> warningBuilder_; /** * * * <pre> * [Output Only] 
Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> * * @return Whether the warning field is set. */ public boolean hasWarning() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> * * @return The warning. */ public com.google.cloud.compute.v1.Warning getWarning() { if (warningBuilder_ == null) { return warning_ == null ? com.google.cloud.compute.v1.Warning.getDefaultInstance() : warning_; } else { return warningBuilder_.getMessage(); } } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ public Builder setWarning(com.google.cloud.compute.v1.Warning value) { if (warningBuilder_ == null) { if (value == null) { throw new NullPointerException(); } warning_ = value; } else { warningBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ public Builder setWarning(com.google.cloud.compute.v1.Warning.Builder builderForValue) { if (warningBuilder_ == null) { warning_ = builderForValue.build(); } else { warningBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. 
* </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ public Builder mergeWarning(com.google.cloud.compute.v1.Warning value) { if (warningBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && warning_ != null && warning_ != com.google.cloud.compute.v1.Warning.getDefaultInstance()) { getWarningBuilder().mergeFrom(value); } else { warning_ = value; } } else { warningBuilder_.mergeFrom(value); } if (warning_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ public Builder clearWarning() { bitField0_ = (bitField0_ & ~0x00000002); warning_ = null; if (warningBuilder_ != null) { warningBuilder_.dispose(); warningBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ public com.google.cloud.compute.v1.Warning.Builder getWarningBuilder() { bitField0_ |= 0x00000002; onChanged(); return getWarningFieldBuilder().getBuilder(); } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. * </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ public com.google.cloud.compute.v1.WarningOrBuilder getWarningOrBuilder() { if (warningBuilder_ != null) { return warningBuilder_.getMessageOrBuilder(); } else { return warning_ == null ? com.google.cloud.compute.v1.Warning.getDefaultInstance() : warning_; } } /** * * * <pre> * [Output Only] Informational warning which replaces the list of disks when the list is empty. 
* </pre> * * <code>optional .google.cloud.compute.v1.Warning warning = 50704284;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Warning, com.google.cloud.compute.v1.Warning.Builder, com.google.cloud.compute.v1.WarningOrBuilder> getWarningFieldBuilder() { if (warningBuilder_ == null) { warningBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.Warning, com.google.cloud.compute.v1.Warning.Builder, com.google.cloud.compute.v1.WarningOrBuilder>( getWarning(), getParentForChildren(), isClean()); warning_ = null; } return warningBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.DisksScopedList) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.DisksScopedList) private static final com.google.cloud.compute.v1.DisksScopedList DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.DisksScopedList(); } public static com.google.cloud.compute.v1.DisksScopedList getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DisksScopedList> PARSER = new com.google.protobuf.AbstractParser<DisksScopedList>() { @java.lang.Override public DisksScopedList parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DisksScopedList> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DisksScopedList> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.DisksScopedList getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,904
java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/UpdateEnvironmentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/environment.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2; /** * * * <pre> * The request message for * [Environments.UpdateEnvironment][google.cloud.dialogflow.v2.Environments.UpdateEnvironment]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.UpdateEnvironmentRequest} */ public final class UpdateEnvironmentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.UpdateEnvironmentRequest) UpdateEnvironmentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateEnvironmentRequest.newBuilder() to construct. 
private UpdateEnvironmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateEnvironmentRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateEnvironmentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EnvironmentProto .internal_static_google_cloud_dialogflow_v2_UpdateEnvironmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EnvironmentProto .internal_static_google_cloud_dialogflow_v2_UpdateEnvironmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.class, com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.Builder.class); } private int bitField0_; public static final int ENVIRONMENT_FIELD_NUMBER = 1; private com.google.cloud.dialogflow.v2.Environment environment_; /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the environment field is set. */ @java.lang.Override public boolean hasEnvironment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The environment. */ @java.lang.Override public com.google.cloud.dialogflow.v2.Environment getEnvironment() { return environment_ == null ? com.google.cloud.dialogflow.v2.Environment.getDefaultInstance() : environment_; } /** * * * <pre> * Required. The environment to update. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.EnvironmentOrBuilder getEnvironmentOrBuilder() { return environment_ == null ? com.google.cloud.dialogflow.v2.Environment.getDefaultInstance() : environment_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int ALLOW_LOAD_TO_DRAFT_AND_DISCARD_CHANGES_FIELD_NUMBER = 3; private boolean allowLoadToDraftAndDiscardChanges_ = false; /** * * * <pre> * Optional. This field is used to prevent accidental overwrite of the default * environment, which is an operation that cannot be undone. 
To confirm that * the caller desires this overwrite, this field must be explicitly set to * true when updating the default environment (environment ID = `-`). * </pre> * * <code> * bool allow_load_to_draft_and_discard_changes = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The allowLoadToDraftAndDiscardChanges. */ @java.lang.Override public boolean getAllowLoadToDraftAndDiscardChanges() { return allowLoadToDraftAndDiscardChanges_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getEnvironment()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } if (allowLoadToDraftAndDiscardChanges_ != false) { output.writeBool(3, allowLoadToDraftAndDiscardChanges_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEnvironment()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } if (allowLoadToDraftAndDiscardChanges_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 3, allowLoadToDraftAndDiscardChanges_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest)) { return super.equals(obj); } 
com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest other = (com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest) obj; if (hasEnvironment() != other.hasEnvironment()) return false; if (hasEnvironment()) { if (!getEnvironment().equals(other.getEnvironment())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (getAllowLoadToDraftAndDiscardChanges() != other.getAllowLoadToDraftAndDiscardChanges()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEnvironment()) { hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER; hash = (53 * hash) + getEnvironment().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (37 * hash) + ALLOW_LOAD_TO_DRAFT_AND_DISCARD_CHANGES_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowLoadToDraftAndDiscardChanges()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } 
public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [Environments.UpdateEnvironment][google.cloud.dialogflow.v2.Environments.UpdateEnvironment]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.UpdateEnvironmentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.UpdateEnvironmentRequest) com.google.cloud.dialogflow.v2.UpdateEnvironmentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EnvironmentProto .internal_static_google_cloud_dialogflow_v2_UpdateEnvironmentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EnvironmentProto .internal_static_google_cloud_dialogflow_v2_UpdateEnvironmentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.class, com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEnvironmentFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; environment_ = null; if (environmentBuilder_ != null) { environmentBuilder_.dispose(); environmentBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } allowLoadToDraftAndDiscardChanges_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.EnvironmentProto 
.internal_static_google_cloud_dialogflow_v2_UpdateEnvironmentRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest build() { com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest buildPartial() { com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest result = new com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.environment_ = environmentBuilder_ == null ? environment_ : environmentBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.allowLoadToDraftAndDiscardChanges_ = allowLoadToDraftAndDiscardChanges_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest) { return mergeFrom((com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest other) { if (other == com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest.getDefaultInstance()) return this; if (other.hasEnvironment()) { mergeEnvironment(other.getEnvironment()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.getAllowLoadToDraftAndDiscardChanges() != false) { setAllowLoadToDraftAndDiscardChanges(other.getAllowLoadToDraftAndDiscardChanges()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } 
@java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getEnvironmentFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { allowLoadToDraftAndDiscardChanges_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.dialogflow.v2.Environment environment_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.Environment, com.google.cloud.dialogflow.v2.Environment.Builder, com.google.cloud.dialogflow.v2.EnvironmentOrBuilder> environmentBuilder_; /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the environment field is set. */ public boolean hasEnvironment() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The environment. 
*/ public com.google.cloud.dialogflow.v2.Environment getEnvironment() { if (environmentBuilder_ == null) { return environment_ == null ? com.google.cloud.dialogflow.v2.Environment.getDefaultInstance() : environment_; } else { return environmentBuilder_.getMessage(); } } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEnvironment(com.google.cloud.dialogflow.v2.Environment value) { if (environmentBuilder_ == null) { if (value == null) { throw new NullPointerException(); } environment_ = value; } else { environmentBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEnvironment( com.google.cloud.dialogflow.v2.Environment.Builder builderForValue) { if (environmentBuilder_ == null) { environment_ = builderForValue.build(); } else { environmentBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEnvironment(com.google.cloud.dialogflow.v2.Environment value) { if (environmentBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && environment_ != null && environment_ != com.google.cloud.dialogflow.v2.Environment.getDefaultInstance()) { getEnvironmentBuilder().mergeFrom(value); } else { environment_ = value; } } else { environmentBuilder_.mergeFrom(value); } if (environment_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The environment to update. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEnvironment() { bitField0_ = (bitField0_ & ~0x00000001); environment_ = null; if (environmentBuilder_ != null) { environmentBuilder_.dispose(); environmentBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2.Environment.Builder getEnvironmentBuilder() { bitField0_ |= 0x00000001; onChanged(); return getEnvironmentFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The environment to update. * </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2.EnvironmentOrBuilder getEnvironmentOrBuilder() { if (environmentBuilder_ != null) { return environmentBuilder_.getMessageOrBuilder(); } else { return environment_ == null ? com.google.cloud.dialogflow.v2.Environment.getDefaultInstance() : environment_; } } /** * * * <pre> * Required. The environment to update. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.Environment environment = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.Environment, com.google.cloud.dialogflow.v2.Environment.Builder, com.google.cloud.dialogflow.v2.EnvironmentOrBuilder> getEnvironmentFieldBuilder() { if (environmentBuilder_ == null) { environmentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.Environment, com.google.cloud.dialogflow.v2.Environment.Builder, com.google.cloud.dialogflow.v2.EnvironmentOrBuilder>( getEnvironment(), getParentForChildren(), isClean()); environment_ = null; } return environmentBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The mask to control which fields get updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The mask to control which fields get updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The mask to control which fields get updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private boolean allowLoadToDraftAndDiscardChanges_; /** * * * <pre> * Optional. This field is used to prevent accidental overwrite of the default * environment, which is an operation that cannot be undone. To confirm that * the caller desires this overwrite, this field must be explicitly set to * true when updating the default environment (environment ID = `-`). * </pre> * * <code> * bool allow_load_to_draft_and_discard_changes = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The allowLoadToDraftAndDiscardChanges. 
*/ @java.lang.Override public boolean getAllowLoadToDraftAndDiscardChanges() { return allowLoadToDraftAndDiscardChanges_; } /** * * * <pre> * Optional. This field is used to prevent accidental overwrite of the default * environment, which is an operation that cannot be undone. To confirm that * the caller desires this overwrite, this field must be explicitly set to * true when updating the default environment (environment ID = `-`). * </pre> * * <code> * bool allow_load_to_draft_and_discard_changes = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The allowLoadToDraftAndDiscardChanges to set. * @return This builder for chaining. */ public Builder setAllowLoadToDraftAndDiscardChanges(boolean value) { allowLoadToDraftAndDiscardChanges_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. This field is used to prevent accidental overwrite of the default * environment, which is an operation that cannot be undone. To confirm that * the caller desires this overwrite, this field must be explicitly set to * true when updating the default environment (environment ID = `-`). * </pre> * * <code> * bool allow_load_to_draft_and_discard_changes = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. 
*/ public Builder clearAllowLoadToDraftAndDiscardChanges() { bitField0_ = (bitField0_ & ~0x00000004); allowLoadToDraftAndDiscardChanges_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.UpdateEnvironmentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.UpdateEnvironmentRequest) private static final com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest(); } public static com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateEnvironmentRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateEnvironmentRequest>() { @java.lang.Override public UpdateEnvironmentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateEnvironmentRequest> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateEnvironmentRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2.UpdateEnvironmentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,795
java-video-intelligence/proto-google-cloud-video-intelligence-v1/src/main/java/com/google/cloud/videointelligence/v1/FaceDetectionAnnotation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1; /** * * * <pre> * Face detection annotation. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.FaceDetectionAnnotation} */ public final class FaceDetectionAnnotation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.FaceDetectionAnnotation) FaceDetectionAnnotationOrBuilder { private static final long serialVersionUID = 0L; // Use FaceDetectionAnnotation.newBuilder() to construct. 
private FaceDetectionAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FaceDetectionAnnotation() { tracks_ = java.util.Collections.emptyList(); thumbnail_ = com.google.protobuf.ByteString.EMPTY; version_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FaceDetectionAnnotation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_FaceDetectionAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_FaceDetectionAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.class, com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.Builder.class); } public static final int TRACKS_FIELD_NUMBER = 3; @SuppressWarnings("serial") private java.util.List<com.google.cloud.videointelligence.v1.Track> tracks_; /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ @java.lang.Override public java.util.List<com.google.cloud.videointelligence.v1.Track> getTracksList() { return tracks_; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.videointelligence.v1.TrackOrBuilder> getTracksOrBuilderList() { return tracks_; } /** * * * <pre> * The face tracks with attributes. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ @java.lang.Override public int getTracksCount() { return tracks_.size(); } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.Track getTracks(int index) { return tracks_.get(index); } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.TrackOrBuilder getTracksOrBuilder(int index) { return tracks_.get(index); } public static final int THUMBNAIL_FIELD_NUMBER = 4; private com.google.protobuf.ByteString thumbnail_ = com.google.protobuf.ByteString.EMPTY; /** * * * <pre> * The thumbnail of a person's face. * </pre> * * <code>bytes thumbnail = 4;</code> * * @return The thumbnail. */ @java.lang.Override public com.google.protobuf.ByteString getThumbnail() { return thumbnail_; } public static final int VERSION_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object version_ = ""; /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @return The version. */ @java.lang.Override public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @return The bytes for version. 
*/ @java.lang.Override public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < tracks_.size(); i++) { output.writeMessage(3, tracks_.get(i)); } if (!thumbnail_.isEmpty()) { output.writeBytes(4, thumbnail_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, version_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < tracks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, tracks_.get(i)); } if (!thumbnail_.isEmpty()) { size += com.google.protobuf.CodedOutputStream.computeBytesSize(4, thumbnail_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, version_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1.FaceDetectionAnnotation)) { return super.equals(obj); } com.google.cloud.videointelligence.v1.FaceDetectionAnnotation other = (com.google.cloud.videointelligence.v1.FaceDetectionAnnotation) 
obj; if (!getTracksList().equals(other.getTracksList())) return false; if (!getThumbnail().equals(other.getThumbnail())) return false; if (!getVersion().equals(other.getVersion())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getTracksCount() > 0) { hash = (37 * hash) + TRACKS_FIELD_NUMBER; hash = (53 * hash) + getTracksList().hashCode(); } hash = (37 * hash) + THUMBNAIL_FIELD_NUMBER; hash = (53 * hash) + getThumbnail().hashCode(); hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1.FaceDetectionAnnotation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Face detection annotation. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.FaceDetectionAnnotation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.FaceDetectionAnnotation) com.google.cloud.videointelligence.v1.FaceDetectionAnnotationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_FaceDetectionAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_FaceDetectionAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.class, com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.Builder.class); } // Construct using com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.newBuilder() private Builder() {} 
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (tracksBuilder_ == null) { tracks_ = java.util.Collections.emptyList(); } else { tracks_ = null; tracksBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); thumbnail_ = com.google.protobuf.ByteString.EMPTY; version_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_FaceDetectionAnnotation_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1.FaceDetectionAnnotation getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1.FaceDetectionAnnotation build() { com.google.cloud.videointelligence.v1.FaceDetectionAnnotation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1.FaceDetectionAnnotation buildPartial() { com.google.cloud.videointelligence.v1.FaceDetectionAnnotation result = new com.google.cloud.videointelligence.v1.FaceDetectionAnnotation(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.videointelligence.v1.FaceDetectionAnnotation result) { if (tracksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { tracks_ = java.util.Collections.unmodifiableList(tracks_); bitField0_ = (bitField0_ & ~0x00000001); } result.tracks_ = tracks_; } else { result.tracks_ = tracksBuilder_.build(); } } private void buildPartial0( com.google.cloud.videointelligence.v1.FaceDetectionAnnotation result) 
{ int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.thumbnail_ = thumbnail_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.version_ = version_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1.FaceDetectionAnnotation) { return mergeFrom((com.google.cloud.videointelligence.v1.FaceDetectionAnnotation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.videointelligence.v1.FaceDetectionAnnotation other) { if (other == com.google.cloud.videointelligence.v1.FaceDetectionAnnotation.getDefaultInstance()) return this; if (tracksBuilder_ == null) { if (!other.tracks_.isEmpty()) { if (tracks_.isEmpty()) { tracks_ = other.tracks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureTracksIsMutable(); tracks_.addAll(other.tracks_); } onChanged(); } } else { if (!other.tracks_.isEmpty()) { if (tracksBuilder_.isEmpty()) { tracksBuilder_.dispose(); tracksBuilder_ = null; tracks_ = other.tracks_; bitField0_ = (bitField0_ 
& ~0x00000001); tracksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getTracksFieldBuilder() : null; } else { tracksBuilder_.addAllMessages(other.tracks_); } } } if (other.getThumbnail() != com.google.protobuf.ByteString.EMPTY) { setThumbnail(other.getThumbnail()); } if (!other.getVersion().isEmpty()) { version_ = other.version_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 26: { com.google.cloud.videointelligence.v1.Track m = input.readMessage( com.google.cloud.videointelligence.v1.Track.parser(), extensionRegistry); if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.add(m); } else { tracksBuilder_.addMessage(m); } break; } // case 26 case 34: { thumbnail_ = input.readBytes(); bitField0_ |= 0x00000002; break; } // case 34 case 42: { version_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.videointelligence.v1.Track> tracks_ = java.util.Collections.emptyList(); private void ensureTracksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { tracks_ = new 
java.util.ArrayList<com.google.cloud.videointelligence.v1.Track>(tracks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.Track, com.google.cloud.videointelligence.v1.Track.Builder, com.google.cloud.videointelligence.v1.TrackOrBuilder> tracksBuilder_; /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.Track> getTracksList() { if (tracksBuilder_ == null) { return java.util.Collections.unmodifiableList(tracks_); } else { return tracksBuilder_.getMessageList(); } } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public int getTracksCount() { if (tracksBuilder_ == null) { return tracks_.size(); } else { return tracksBuilder_.getCount(); } } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public com.google.cloud.videointelligence.v1.Track getTracks(int index) { if (tracksBuilder_ == null) { return tracks_.get(index); } else { return tracksBuilder_.getMessage(index); } } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder setTracks(int index, com.google.cloud.videointelligence.v1.Track value) { if (tracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTracksIsMutable(); tracks_.set(index, value); onChanged(); } else { tracksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The face tracks with attributes. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder setTracks( int index, com.google.cloud.videointelligence.v1.Track.Builder builderForValue) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.set(index, builderForValue.build()); onChanged(); } else { tracksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder addTracks(com.google.cloud.videointelligence.v1.Track value) { if (tracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTracksIsMutable(); tracks_.add(value); onChanged(); } else { tracksBuilder_.addMessage(value); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder addTracks(int index, com.google.cloud.videointelligence.v1.Track value) { if (tracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureTracksIsMutable(); tracks_.add(index, value); onChanged(); } else { tracksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder addTracks(com.google.cloud.videointelligence.v1.Track.Builder builderForValue) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.add(builderForValue.build()); onChanged(); } else { tracksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The face tracks with attributes. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder addTracks( int index, com.google.cloud.videointelligence.v1.Track.Builder builderForValue) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.add(index, builderForValue.build()); onChanged(); } else { tracksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder addAllTracks( java.lang.Iterable<? extends com.google.cloud.videointelligence.v1.Track> values) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tracks_); onChanged(); } else { tracksBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder clearTracks() { if (tracksBuilder_ == null) { tracks_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { tracksBuilder_.clear(); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public Builder removeTracks(int index) { if (tracksBuilder_ == null) { ensureTracksIsMutable(); tracks_.remove(index); onChanged(); } else { tracksBuilder_.remove(index); } return this; } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public com.google.cloud.videointelligence.v1.Track.Builder getTracksBuilder(int index) { return getTracksFieldBuilder().getBuilder(index); } /** * * * <pre> * The face tracks with attributes. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public com.google.cloud.videointelligence.v1.TrackOrBuilder getTracksOrBuilder(int index) { if (tracksBuilder_ == null) { return tracks_.get(index); } else { return tracksBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public java.util.List<? extends com.google.cloud.videointelligence.v1.TrackOrBuilder> getTracksOrBuilderList() { if (tracksBuilder_ != null) { return tracksBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(tracks_); } } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public com.google.cloud.videointelligence.v1.Track.Builder addTracksBuilder() { return getTracksFieldBuilder() .addBuilder(com.google.cloud.videointelligence.v1.Track.getDefaultInstance()); } /** * * * <pre> * The face tracks with attributes. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public com.google.cloud.videointelligence.v1.Track.Builder addTracksBuilder(int index) { return getTracksFieldBuilder() .addBuilder(index, com.google.cloud.videointelligence.v1.Track.getDefaultInstance()); } /** * * * <pre> * The face tracks with attributes. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.Track tracks = 3;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.Track.Builder> getTracksBuilderList() { return getTracksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.Track, com.google.cloud.videointelligence.v1.Track.Builder, com.google.cloud.videointelligence.v1.TrackOrBuilder> getTracksFieldBuilder() { if (tracksBuilder_ == null) { tracksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.Track, com.google.cloud.videointelligence.v1.Track.Builder, com.google.cloud.videointelligence.v1.TrackOrBuilder>( tracks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); tracks_ = null; } return tracksBuilder_; } private com.google.protobuf.ByteString thumbnail_ = com.google.protobuf.ByteString.EMPTY; /** * * * <pre> * The thumbnail of a person's face. * </pre> * * <code>bytes thumbnail = 4;</code> * * @return The thumbnail. */ @java.lang.Override public com.google.protobuf.ByteString getThumbnail() { return thumbnail_; } /** * * * <pre> * The thumbnail of a person's face. * </pre> * * <code>bytes thumbnail = 4;</code> * * @param value The thumbnail to set. * @return This builder for chaining. */ public Builder setThumbnail(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } thumbnail_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The thumbnail of a person's face. * </pre> * * <code>bytes thumbnail = 4;</code> * * @return This builder for chaining. */ public Builder clearThumbnail() { bitField0_ = (bitField0_ & ~0x00000002); thumbnail_ = getDefaultInstance().getThumbnail(); onChanged(); return this; } private java.lang.Object version_ = ""; /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @return The version. 
*/ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @return The bytes for version. */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } version_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @return This builder for chaining. */ public Builder clearVersion() { version_ = getDefaultInstance().getVersion(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 5;</code> * * @param value The bytes for version to set. * @return This builder for chaining. 
*/ public Builder setVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); version_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.FaceDetectionAnnotation) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.FaceDetectionAnnotation) private static final com.google.cloud.videointelligence.v1.FaceDetectionAnnotation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.FaceDetectionAnnotation(); } public static com.google.cloud.videointelligence.v1.FaceDetectionAnnotation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FaceDetectionAnnotation> PARSER = new com.google.protobuf.AbstractParser<FaceDetectionAnnotation>() { @java.lang.Override public FaceDetectionAnnotation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<FaceDetectionAnnotation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FaceDetectionAnnotation> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1.FaceDetectionAnnotation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,741
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListStreamsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/streams_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Message for requesting list of Streams. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListStreamsRequest} */ public final class ListStreamsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListStreamsRequest) ListStreamsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListStreamsRequest.newBuilder() to construct. 
private ListStreamsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListStreamsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; orderBy_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListStreamsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.StreamsServiceProto .internal_static_google_cloud_visionai_v1_ListStreamsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.StreamsServiceProto .internal_static_google_cloud_visionai_v1_ListStreamsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListStreamsRequest.class, com.google.cloud.visionai.v1.ListStreamsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Filtering results. 
* </pre> * * <code>string filter = 4;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Filtering results. * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ORDER_BY_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object orderBy_ = ""; /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @return The orderBy. */ @java.lang.Override public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } } /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @return The bytes for orderBy. 
*/ @java.lang.Override public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListStreamsRequest)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListStreamsRequest other = (com.google.cloud.visionai.v1.ListStreamsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getOrderBy().equals(other.getOrderBy())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + ORDER_BY_FIELD_NUMBER; hash = (53 * hash) + getOrderBy().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListStreamsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.ListStreamsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for requesting list of Streams. 
* </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListStreamsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListStreamsRequest) com.google.cloud.visionai.v1.ListStreamsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.StreamsServiceProto .internal_static_google_cloud_visionai_v1_ListStreamsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.StreamsServiceProto .internal_static_google_cloud_visionai_v1_ListStreamsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListStreamsRequest.class, com.google.cloud.visionai.v1.ListStreamsRequest.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListStreamsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; orderBy_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.StreamsServiceProto .internal_static_google_cloud_visionai_v1_ListStreamsRequest_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ListStreamsRequest getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ListStreamsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListStreamsRequest build() { com.google.cloud.visionai.v1.ListStreamsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } 
@java.lang.Override public com.google.cloud.visionai.v1.ListStreamsRequest buildPartial() { com.google.cloud.visionai.v1.ListStreamsRequest result = new com.google.cloud.visionai.v1.ListStreamsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.visionai.v1.ListStreamsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.orderBy_ = orderBy_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ListStreamsRequest) { return mergeFrom((com.google.cloud.visionai.v1.ListStreamsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.cloud.visionai.v1.ListStreamsRequest other) { if (other == com.google.cloud.visionai.v1.ListStreamsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { orderBy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int 
bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
Parent value for ListStreamsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Filtering results. * </pre> * * <code>string filter = 4;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Filtering results. * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Filtering results. * </pre> * * <code>string filter = 4;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Filtering results. * </pre> * * <code>string filter = 4;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Filtering results. * </pre> * * <code>string filter = 4;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object orderBy_ = ""; /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @return The orderBy. */ public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @return The bytes for orderBy. */ public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @param value The orderBy to set. * @return This builder for chaining. */ public Builder setOrderBy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } orderBy_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @return This builder for chaining. */ public Builder clearOrderBy() { orderBy_ = getDefaultInstance().getOrderBy(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * * * <pre> * Hint for how to order the results. * </pre> * * <code>string order_by = 5;</code> * * @param value The bytes for orderBy to set. * @return This builder for chaining. 
*/ public Builder setOrderByBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); orderBy_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListStreamsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListStreamsRequest) private static final com.google.cloud.visionai.v1.ListStreamsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListStreamsRequest(); } public static com.google.cloud.visionai.v1.ListStreamsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListStreamsRequest> PARSER = new com.google.protobuf.AbstractParser<ListStreamsRequest>() { @java.lang.Override public ListStreamsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListStreamsRequest> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListStreamsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ListStreamsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/maven
37,711
impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/InferenceStrategyTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.maven.cling.invoker.mvnup.goals; import java.io.StringReader; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.Optional; import org.apache.maven.api.cli.mvnup.UpgradeOptions; import org.apache.maven.cling.invoker.mvnup.UpgradeContext; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.input.SAXBuilder; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Unit tests for the {@link InferenceStrategy} class. * Tests Maven 4.1.0+ inference optimizations including dependency and parent inference. 
 */
@DisplayName("InferenceStrategy")
class InferenceStrategyTest {

    // Strategy under test; re-created per test in setUp() so tests stay independent.
    private InferenceStrategy strategy;
    // JDOM2 parser used to build in-memory POM documents from the XML fixtures.
    private SAXBuilder saxBuilder;

    @BeforeEach
    void setUp() {
        strategy = new InferenceStrategy();
        saxBuilder = new SAXBuilder();
    }

    // Convenience wrappers around the shared TestUtils factory methods.
    private UpgradeContext createMockContext() {
        return TestUtils.createMockContext();
    }

    private UpgradeContext createMockContext(UpgradeOptions options) {
        return TestUtils.createMockContext(options);
    }

    private UpgradeOptions createDefaultOptions() {
        return TestUtils.createDefaultOptions();
    }

    @Nested
    @DisplayName("Applicability")
    class ApplicabilityTests {

        @Test
        @DisplayName("should be applicable when --infer option is true")
        void shouldBeApplicableWhenInferOptionTrue() {
            UpgradeOptions options = mock(UpgradeOptions.class);
            when(options.infer()).thenReturn(Optional.of(true));
            when(options.all()).thenReturn(Optional.empty());
            UpgradeContext context = createMockContext(options);

            assertTrue(strategy.isApplicable(context), "Strategy should be applicable when --infer is true");
        }

        @Test
        @DisplayName("should be applicable when --all option is specified")
        void shouldBeApplicableWhenAllOptionSpecified() {
            UpgradeOptions options = mock(UpgradeOptions.class);
            when(options.all()).thenReturn(Optional.of(true));
            when(options.infer()).thenReturn(Optional.empty());
            UpgradeContext context = createMockContext(options);

            assertTrue(strategy.isApplicable(context), "Strategy should be applicable when --all is specified");
        }

        @Test
        @DisplayName("should be applicable by default when no specific options provided")
        void shouldBeApplicableByDefaultWhenNoSpecificOptions() {
            UpgradeOptions options = createDefaultOptions();
            UpgradeContext context = createMockContext(options);

            assertTrue(
                    strategy.isApplicable(context),
                    "Strategy should be applicable by default when no specific options are provided");
        }

        @Test
        @DisplayName("should not be applicable when --infer option is false")
        void shouldNotBeApplicableWhenInferOptionFalse() {
            UpgradeOptions options = mock(UpgradeOptions.class);
            when(options.infer()).thenReturn(Optional.of(false));
            when(options.all()).thenReturn(Optional.empty());
            UpgradeContext context = createMockContext(options);

            assertFalse(strategy.isApplicable(context), "Strategy should not be applicable when --infer is false");
        }
    }

    @Nested
    @DisplayName("Dependency Inference")
    class DependencyInferenceTests {

        // Dependencies on artifacts built in the same reactor can drop groupId/version
        // in a 4.1.0 model: Maven infers them from the reactor project itself.
        @Test
        @DisplayName("should remove dependency version for project artifact")
        void shouldRemoveDependencyVersionForProjectArtifact() throws Exception {
            String parentPomXml = PomBuilder.create()
                    .namespace("http://maven.apache.org/POM/4.1.0")
                    .modelVersion("4.1.0")
                    .groupId("com.example")
                    .artifactId("parent-project")
                    .version("1.0.0")
                    .packaging("pom")
                    .build();

            String moduleAPomXml = PomBuilder.create()
                    .namespace("http://maven.apache.org/POM/4.1.0")
                    .modelVersion("4.1.0")
                    .parent("com.example", "parent-project", "1.0.0")
                    .artifactId("module-a")
                    .build();

            String moduleBPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                        </parent>
                        <artifactId>module-b</artifactId>
                        <dependencies>
                            <dependency>
                                <groupId>com.example</groupId>
                                <artifactId>module-a</artifactId>
                                <version>1.0.0</version>
                            </dependency>
                        </dependencies>
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document moduleADoc = saxBuilder.build(new StringReader(moduleAPomXml));
            Document moduleBDoc = saxBuilder.build(new StringReader(moduleBPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
            pomMap.put(Paths.get("project", "module-a", "pom.xml"), moduleADoc);
            pomMap.put(Paths.get("project", "module-b", "pom.xml"), moduleBDoc);

            Element moduleBRoot = moduleBDoc.getRootElement();
            Element dependencies = moduleBRoot.getChild("dependencies", moduleBRoot.getNamespace());
            Element dependency = dependencies.getChild("dependency", moduleBRoot.getNamespace());

            // Verify dependency elements exist before inference
            assertNotNull(dependency.getChild("groupId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("artifactId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("version", moduleBRoot.getNamespace()));

            // Apply dependency inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify version was removed (can be inferred from project)
            assertNull(dependency.getChild("version", moduleBRoot.getNamespace()));
            // groupId should also be removed (can be inferred from project)
            assertNull(dependency.getChild("groupId", moduleBRoot.getNamespace()));
            // artifactId should remain (always required)
            assertNotNull(dependency.getChild("artifactId", moduleBRoot.getNamespace()));
        }

        @Test
        @DisplayName("should keep dependency version for external artifact")
        void shouldKeepDependencyVersionForExternalArtifact() throws Exception {
            String modulePomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <groupId>com.example</groupId>
                        <artifactId>my-module</artifactId>
                        <version>1.0.0</version>
                        <dependencies>
                            <dependency>
                                <groupId>org.apache.commons</groupId>
                                <artifactId>commons-lang3</artifactId>
                                <version>3.12.0</version>
                            </dependency>
                        </dependencies>
                    </project>
                    """;

            Document moduleDoc = saxBuilder.build(new StringReader(modulePomXml));
            Map<Path, Document> pomMap = Map.of(Paths.get("project", "pom.xml"), moduleDoc);

            Element moduleRoot = moduleDoc.getRootElement();
            Element dependencies = moduleRoot.getChild("dependencies", moduleRoot.getNamespace());
            Element dependency = dependencies.getChild("dependency", moduleRoot.getNamespace());

            // Apply dependency inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify all dependency elements remain (external dependency)
            assertNotNull(dependency.getChild("groupId", moduleRoot.getNamespace()));
            assertNotNull(dependency.getChild("artifactId", moduleRoot.getNamespace()));
            assertNotNull(dependency.getChild("version", moduleRoot.getNamespace()));
        }

        @Test
        @DisplayName("should keep dependency version when version mismatch")
        void shouldKeepDependencyVersionWhenVersionMismatch() throws Exception {
            String moduleAPomXml = PomBuilder.create()
                    .namespace("http://maven.apache.org/POM/4.1.0")
                    .modelVersion("4.1.0")
                    .groupId("com.example")
                    .artifactId("module-a")
                    .version("1.0.0")
                    .build();

            // module-b depends on module-a at 0.9.0, which does NOT match the
            // reactor's 1.0.0 build of module-a.
            String moduleBPomXml = PomBuilder.create()
                    .namespace("http://maven.apache.org/POM/4.1.0")
                    .modelVersion("4.1.0")
                    .groupId("com.example")
                    .artifactId("module-b")
                    .version("1.0.0")
                    .dependency("com.example", "module-a", "0.9.0")
                    .build();

            Document moduleADoc = saxBuilder.build(new StringReader(moduleAPomXml));
            Document moduleBDoc = saxBuilder.build(new StringReader(moduleBPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "module-a", "pom.xml"), moduleADoc);
            pomMap.put(Paths.get("project", "module-b", "pom.xml"), moduleBDoc);

            Element moduleBRoot = moduleBDoc.getRootElement();
            Element dependencies = moduleBRoot.getChild("dependencies", moduleBRoot.getNamespace());
            Element dependency = dependencies.getChild("dependency", moduleBRoot.getNamespace());

            // Apply dependency inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify correct behavior when version doesn't match:
            // - groupId should be removed (can be inferred from project regardless of version)
            // - version should remain (doesn't match project version, so can't be inferred)
            // - artifactId should remain (always required)
            assertNull(dependency.getChild("groupId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("artifactId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("version", moduleBRoot.getNamespace()));
        }

        @Test
        @DisplayName("should handle plugin dependencies")
        void shouldHandlePluginDependencies() throws Exception {
            String moduleAPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <groupId>com.example</groupId>
                        <artifactId>module-a</artifactId>
                        <version>1.0.0</version>
                    </project>
                    """;

            String moduleBPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <groupId>com.example</groupId>
                        <artifactId>module-b</artifactId>
                        <version>1.0.0</version>
                        <build>
                            <plugins>
                                <plugin>
                                    <groupId>org.apache.maven.plugins</groupId>
                                    <artifactId>maven-compiler-plugin</artifactId>
                                    <dependencies>
                                        <dependency>
                                            <groupId>com.example</groupId>
                                            <artifactId>module-a</artifactId>
                                            <version>1.0.0</version>
                                        </dependency>
                                    </dependencies>
                                </plugin>
                            </plugins>
                        </build>
                    </project>
                    """;

            Document moduleADoc = saxBuilder.build(new StringReader(moduleAPomXml));
            Document moduleBDoc = saxBuilder.build(new StringReader(moduleBPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "module-a", "pom.xml"), moduleADoc);
            pomMap.put(Paths.get("project", "module-b", "pom.xml"), moduleBDoc);

            // Navigate to the dependency nested under build/plugins/plugin.
            Element moduleBRoot = moduleBDoc.getRootElement();
            Element build = moduleBRoot.getChild("build", moduleBRoot.getNamespace());
            Element plugins = build.getChild("plugins", moduleBRoot.getNamespace());
            Element plugin = plugins.getChild("plugin", moduleBRoot.getNamespace());
            Element dependencies = plugin.getChild("dependencies", moduleBRoot.getNamespace());
            Element dependency = dependencies.getChild("dependency", moduleBRoot.getNamespace());

            // Apply dependency inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify version and groupId were removed from plugin dependency
            assertNull(dependency.getChild("version", moduleBRoot.getNamespace()));
            assertNull(dependency.getChild("groupId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("artifactId", moduleBRoot.getNamespace()));
        }
    }

    @Nested
    @DisplayName("Parent Inference")
    class ParentInferenceTests {

        @Test
        @DisplayName("should remove parent groupId when child doesn't have explicit groupId")
        void shouldRemoveParentGroupIdWhenChildDoesntHaveExplicitGroupId() throws Exception {
            String parentPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <groupId>com.example</groupId>
                        <artifactId>parent-project</artifactId>
                        <version>1.0.0</version>
                    </project>
                    """;

            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                            <relativePath>../pom.xml</relativePath>
                        </parent>
                        <artifactId>child-project</artifactId>
                        <!-- No explicit groupId - will inherit from parent -->
                        <!-- No explicit version - will inherit from parent -->
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document childDoc = saxBuilder.build(new StringReader(childPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
            pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Verify parent elements exist before inference
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify parent groupId and version were removed (since child doesn't have explicit ones)
            assertNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNull(parentElement.getChild("version", childRoot.getNamespace()));
            // artifactId should also be removed since parent POM is in pomMap
            assertNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
        }

        @Test
        @DisplayName("should keep parent groupId when child has explicit groupId")
        void shouldKeepParentGroupIdWhenChildHasExplicitGroupId() throws Exception {
            String parentPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <groupId>com.example</groupId>
                        <artifactId>parent-project</artifactId>
                        <version>1.0.0</version>
                    </project>
                    """;

            // Child declares its own groupId/version, so the parent's coordinates
            // cannot be inferred away.
            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                            <relativePath>../pom.xml</relativePath>
                        </parent>
                        <groupId>com.example.child</groupId>
                        <artifactId>child-project</artifactId>
                        <version>2.0.0</version>
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document childDoc = saxBuilder.build(new StringReader(childPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
            pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify parent elements are kept (since child has explicit values)
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));
            // artifactId should still be removed since parent POM is in pomMap
            assertNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
        }

        @Test
        @DisplayName("should not trim parent elements when parent is external")
        void shouldNotTrimParentElementsWhenParentIsExternal() throws Exception {
            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <parent>
                            <groupId>org.springframework.boot</groupId>
                            <artifactId>spring-boot-starter-parent</artifactId>
                            <version>3.0.0</version>
                            <relativePath/>
                        </parent>
                        <artifactId>my-spring-app</artifactId>
                        <!-- No explicit groupId or version - would inherit from parent -->
                    </project>
                    """;

            Document childDoc = saxBuilder.build(new StringReader(childPomXml));
            Map<Path, Document> pomMap = Map.of(Paths.get("project", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify correct behavior for external parent:
            // - groupId should NOT be removed (external parents need groupId to be located)
            // - artifactId should NOT be removed (external parents need artifactId to be located)
            // - version should NOT be removed (external parents need version to be located)
            // This prevents the "parent.groupId is missing" error reported in issue #7934
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));
        }

        @Test
        @DisplayName("should trim parent elements when parent is in reactor")
        void shouldTrimParentElementsWhenParentIsInReactor() throws Exception {
            // Create parent POM
            String parentPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <groupId>com.example</groupId>
                        <artifactId>parent-project</artifactId>
                        <version>1.0.0</version>
                        <packaging>pom</packaging>
                    </project>
                    """;

            // Create child POM that references the parent
            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.1.0">
                        <modelVersion>4.1.0</modelVersion>
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                        </parent>
                        <artifactId>child-project</artifactId>
                        <!-- No explicit groupId or version - would inherit from parent -->
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document childDoc = saxBuilder.build(new StringReader(childPomXml));

            // Both POMs are in the reactor
            Map<Path, Document> pomMap = Map.of(
                    Paths.get("pom.xml"), parentDoc,
                    Paths.get("child", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify correct behavior for reactor parent:
            // - groupId should be removed (child has no explicit groupId, parent is in reactor)
            // - artifactId should be removed (can be inferred from relativePath)
            // - version should be removed (child has no explicit version, parent is in reactor)
            assertNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNull(parentElement.getChild("version", childRoot.getNamespace()));
        }
    }

    @Nested
    @DisplayName("Maven 4.0.0 Limited Inference")
    class Maven400LimitedInferenceTests {

        // In a 4.0.0 model only child groupId/version matching the parent can be
        // trimmed; parent coordinates and dependency coordinates must stay.
        @Test
        @DisplayName("should remove child groupId and version when they match parent in 4.0.0")
        void shouldRemoveChildGroupIdAndVersionWhenTheyMatchParentIn400() throws Exception {
            String parentPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.0.0">
                        <modelVersion>4.0.0</modelVersion>
                        <groupId>com.example</groupId>
                        <artifactId>parent-project</artifactId>
                        <version>1.0.0</version>
                        <packaging>pom</packaging>
                    </project>
                    """;

            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.0.0">
                        <modelVersion>4.0.0</modelVersion>
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                            <relativePath>../pom.xml</relativePath>
                        </parent>
                        <groupId>com.example</groupId>
                        <artifactId>child-project</artifactId>
                        <version>1.0.0</version>
                        <!-- Child groupId and version match parent - can be inferred -->
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document childDoc = saxBuilder.build(new StringReader(childPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
            pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Verify child and parent elements exist before inference
            assertNotNull(childRoot.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(childRoot.getChild("version", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify child groupId and version were removed (Maven 4.0.0 can infer these from parent)
            assertNull(childRoot.getChild("groupId", childRoot.getNamespace()));
            assertNull(childRoot.getChild("version", childRoot.getNamespace()));
            // Child artifactId should remain (always required)
            assertNotNull(childRoot.getChild("artifactId", childRoot.getNamespace()));
            // Parent elements should all remain (no relativePath inference in 4.0.0)
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));
        }

        @Test
        @DisplayName("should keep child groupId when it differs from parent in 4.0.0")
        void shouldKeepChildGroupIdWhenItDiffersFromParentIn400() throws Exception {
            String parentPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.0.0">
                        <modelVersion>4.0.0</modelVersion>
                        <groupId>com.example</groupId>
                        <artifactId>parent-project</artifactId>
                        <version>1.0.0</version>
                        <packaging>pom</packaging>
                    </project>
                    """;

            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.0.0">
                        <modelVersion>4.0.0</modelVersion>
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                            <relativePath>../pom.xml</relativePath>
                        </parent>
                        <groupId>com.example.child</groupId>
                        <artifactId>child-project</artifactId>
                        <version>2.0.0</version>
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document childDoc = saxBuilder.build(new StringReader(childPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
            pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify child elements are kept (since they differ from parent)
            assertNotNull(childRoot.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(childRoot.getChild("version", childRoot.getNamespace()));
            assertNotNull(childRoot.getChild("artifactId", childRoot.getNamespace()));
            // Parent elements should all remain (no relativePath inference in 4.0.0)
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));
        }

        @Test
        @DisplayName("should handle partial inheritance in 4.0.0")
        void shouldHandlePartialInheritanceIn400() throws Exception {
            String parentPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.0.0">
                        <modelVersion>4.0.0</modelVersion>
                        <groupId>com.example</groupId>
                        <artifactId>parent-project</artifactId>
                        <version>1.0.0</version>
                        <packaging>pom</packaging>
                    </project>
                    """;

            String childPomXml =
                    """
                    <?xml version="1.0" encoding="UTF-8"?>
                    <project xmlns="http://maven.apache.org/POM/4.0.0">
                        <modelVersion>4.0.0</modelVersion>
                        <parent>
                            <groupId>com.example</groupId>
                            <artifactId>parent-project</artifactId>
                            <version>1.0.0</version>
                            <relativePath>../pom.xml</relativePath>
                        </parent>
                        <groupId>com.example</groupId>
                        <artifactId>child-project</artifactId>
                        <version>2.0.0</version>
                        <!-- Child groupId matches parent, but version differs -->
                    </project>
                    """;

            Document parentDoc = saxBuilder.build(new StringReader(parentPomXml));
            Document childDoc = saxBuilder.build(new StringReader(childPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "pom.xml"), parentDoc);
            pomMap.put(Paths.get("project", "child", "pom.xml"), childDoc);

            Element childRoot = childDoc.getRootElement();
            Element parentElement = childRoot.getChild("parent", childRoot.getNamespace());

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify child groupId was removed (matches parent, can be inferred)
            assertNull(childRoot.getChild("groupId", childRoot.getNamespace()));
            // Verify child version was kept (differs from parent, cannot be inferred)
            assertNotNull(childRoot.getChild("version", childRoot.getNamespace()));
            // Verify child artifactId was kept (always required)
            assertNotNull(childRoot.getChild("artifactId", childRoot.getNamespace()));
            // Parent elements should all remain (no relativePath inference in 4.0.0)
            assertNotNull(parentElement.getChild("groupId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("artifactId", childRoot.getNamespace()));
            assertNotNull(parentElement.getChild("version", childRoot.getNamespace()));
        }

        @Test
        @DisplayName("should not apply dependency inference to 4.0.0 models")
        void shouldNotApplyDependencyInferenceTo400Models() throws Exception {
            String moduleAPomXml = PomBuilder.create()
                    .namespace("http://maven.apache.org/POM/4.0.0")
                    .modelVersion("4.0.0")
                    .groupId("com.example")
                    .artifactId("module-a")
                    .version("1.0.0")
                    .build();

            String moduleBPomXml = PomBuilder.create()
                    .namespace("http://maven.apache.org/POM/4.0.0")
                    .modelVersion("4.0.0")
                    .groupId("com.example")
                    .artifactId("module-b")
                    .version("1.0.0")
                    .dependency("com.example", "module-a", "1.0.0")
                    .build();

            Document moduleADoc = saxBuilder.build(new StringReader(moduleAPomXml));
            Document moduleBDoc = saxBuilder.build(new StringReader(moduleBPomXml));

            Map<Path, Document> pomMap = new HashMap<>();
            pomMap.put(Paths.get("project", "module-a", "pom.xml"), moduleADoc);
            pomMap.put(Paths.get("project", "module-b", "pom.xml"), moduleBDoc);

            Element moduleBRoot = moduleBDoc.getRootElement();
            Element dependency = moduleBRoot
                    .getChild("dependencies", moduleBRoot.getNamespace())
                    .getChildren("dependency", moduleBRoot.getNamespace())
                    .get(0);

            // Verify dependency elements exist before inference
            assertNotNull(dependency.getChild("groupId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("artifactId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("version", moduleBRoot.getNamespace()));

            // Apply inference
            UpgradeContext context = createMockContext();
            strategy.apply(context, pomMap);

            // Verify dependency inference was NOT applied (all elements should remain for 4.0.0)
            assertNotNull(dependency.getChild("groupId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("artifactId", moduleBRoot.getNamespace()));
            assertNotNull(dependency.getChild("version", moduleBRoot.getNamespace()));
        }
    }

    @Nested
    @DisplayName("Strategy Description")
    class StrategyDescriptionTests {

        @Test
        @DisplayName("should provide meaningful description")
        void shouldProvideMeaningfulDescription() {
            String description = strategy.getDescription();

            assertNotNull(description, "Description should not be null");
            assertFalse(description.trim().isEmpty(), "Description should not be empty");
            assertTrue(description.toLowerCase().contains("infer"), "Description should mention inference");
        }
    }
}
googleapis/google-cloud-java
37,913
java-assured-workloads/proto-google-cloud-assured-workloads-v1/src/main/java/com/google/cloud/assuredworkloads/v1/AcknowledgeViolationRequest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// Any hand edits to this file will be lost on the next protoc regeneration.
// source: google/cloud/assuredworkloads/v1/assuredworkloads.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.assuredworkloads.v1;

/**
 * Request message for acknowledging a Violation.
 *
 * <pre>
 * Request for acknowledging the violation
 * Next Id: 4
 * </pre>
 *
 * <p>Immutable message with three string fields: {@code name} (field 1, required),
 * {@code comment} (field 2, required) and {@code non_compliant_org_policy}
 * (field 3, optional and deprecated).
 *
 * Protobuf type {@code google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest}
 */
public final class AcknowledgeViolationRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest)
    AcknowledgeViolationRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use AcknowledgeViolationRequest.newBuilder() to construct.
  private AcknowledgeViolationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default construction: all string fields start as the empty string.
  private AcknowledgeViolationRequest() {
    name_ = "";
    comment_ = "";
    nonCompliantOrgPolicy_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AcknowledgeViolationRequest();
  }

  /** Returns the descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
        .internal_static_google_cloud_assuredworkloads_v1_AcknowledgeViolationRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
        .internal_static_google_cloud_assuredworkloads_v1_AcknowledgeViolationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.class,
            com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily converted and cached on first
  // access (standard protobuf lazy UTF-8 decoding).
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";

  /**
   * <pre>
   * Required. The resource name of the Violation to acknowledge.
   * Format:
   * organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}
   * </pre>
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // cache the decoded form
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@link #getName()}.
   *
   * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b; // cache the encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int COMMENT_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object comment_ = "";

  /**
   * <pre>
   * Required. Business justification explaining the need for violation acknowledgement
   * </pre>
   *
   * <code>string comment = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The comment.
   */
  @java.lang.Override
  public java.lang.String getComment() {
    java.lang.Object ref = comment_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      comment_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@link #getComment()}.
   *
   * <code>string comment = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for comment.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getCommentBytes() {
    java.lang.Object ref = comment_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      comment_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int NON_COMPLIANT_ORG_POLICY_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object nonCompliantOrgPolicy_ = "";

  /**
   * <pre>
   * Optional. This field is deprecated and will be removed in future version of the API.
   * Name of the OrgPolicy which was modified with non-compliant change and
   * resulted in this violation.
   * Format:
   * projects/{project_number}/policies/{constraint_name}
   * folders/{folder_id}/policies/{constraint_name}
   * organizations/{organization_id}/policies/{constraint_name}
   * </pre>
   *
   * <code>
   * string non_compliant_org_policy = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @deprecated google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.non_compliant_org_policy
   *     is deprecated. See google/cloud/assuredworkloads/v1/assuredworkloads.proto;l=584
   * @return The nonCompliantOrgPolicy.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public java.lang.String getNonCompliantOrgPolicy() {
    java.lang.Object ref = nonCompliantOrgPolicy_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nonCompliantOrgPolicy_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of {@link #getNonCompliantOrgPolicy()}.
   *
   * @deprecated See {@link #getNonCompliantOrgPolicy()}.
   * @return The bytes for nonCompliantOrgPolicy.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public com.google.protobuf.ByteString getNonCompliantOrgPolicyBytes() {
    java.lang.Object ref = nonCompliantOrgPolicy_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nonCompliantOrgPolicy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  // No required proto2-style fields, so initialization always succeeds;
  // the result is memoized after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only non-empty fields (proto3 default-value elision).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(comment_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, comment_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nonCompliantOrgPolicy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, nonCompliantOrgPolicy_);
    }
    getUnknownFields().writeTo(output);
  }

  // Wire size, memoized after first computation (mirrors writeTo exactly).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(comment_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, comment_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nonCompliantOrgPolicy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, nonCompliantOrgPolicy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all three fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest other =
        (com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!getComment().equals(other.getComment())) return false;
    if (!getNonCompliantOrgPolicy().equals(other.getNonCompliantOrgPolicy())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + COMMENT_FIELD_NUMBER;
    hash = (53 * hash) + getComment().hashCode();
    hash = (37 * hash) + NON_COMPLIANT_ORG_POLICY_FIELD_NUMBER;
    hash = (53 * hash) + getNonCompliantOrgPolicy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard static parse entry points (delegate to PARSER). ----

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Mutable builder for {@code AcknowledgeViolationRequest}.
   *
   * <pre>
   * Request for acknowledging the violation
   * Next Id: 4
   * </pre>
   *
   * <p>Field presence is tracked with {@code bitField0_} bits 0x1/0x2/0x4 for
   * name/comment/non_compliant_org_policy respectively.
   *
   * Protobuf type {@code google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest}
   */
  public static final class Builder
      extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest)
      com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
          .internal_static_google_cloud_assuredworkloads_v1_AcknowledgeViolationRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
          .internal_static_google_cloud_assuredworkloads_v1_AcknowledgeViolationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.class,
              com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.Builder.class);
    }

    // Construct using com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      comment_ = "";
      nonCompliantOrgPolicy_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.assuredworkloads.v1.AssuredworkloadsProto
          .internal_static_google_cloud_assuredworkloads_v1_AcknowledgeViolationRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest
        getDefaultInstanceForType() {
      return com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest build() {
      com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest buildPartial() {
      com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest result =
          new com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set.
    private void buildPartial0(
        com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.comment_ = comment_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.nonCompliantOrgPolicy_ = nonCompliantOrgPolicy_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest) {
        return mergeFrom((com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge; proto3 semantics — only non-empty source fields overwrite.
    public Builder mergeFrom(
        com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest other) {
      if (other
          == com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getComment().isEmpty()) {
        comment_ = other.comment_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getNonCompliantOrgPolicy().isEmpty()) {
        nonCompliantOrgPolicy_ = other.nonCompliantOrgPolicy_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: reads tags 10/18/26 (fields 1/2/3, wire type 2) until EOF
    // or an end-group tag; unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                comment_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                nonCompliantOrgPolicy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object name_ = "";

    /**
     * <pre>
     * Required. The resource name of the Violation to acknowledge.
     * Format:
     * organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@link #getName()}.
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the violation resource name (field 1).
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Resets name to its default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Sets name from raw UTF-8 bytes (validated).
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object comment_ = "";

    /**
     * <pre>
     * Required. Business justification explaining the need for violation acknowledgement
     * </pre>
     *
     * <code>string comment = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The comment.
     */
    public java.lang.String getComment() {
      java.lang.Object ref = comment_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        comment_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@link #getComment()}.
     *
     * @return The bytes for comment.
     */
    public com.google.protobuf.ByteString getCommentBytes() {
      java.lang.Object ref = comment_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        comment_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the acknowledgement justification (field 2).
     *
     * @param value The comment to set.
     * @return This builder for chaining.
     */
    public Builder setComment(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      comment_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Resets comment to its default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearComment() {
      comment_ = getDefaultInstance().getComment();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets comment from raw UTF-8 bytes (validated).
     *
     * @param value The bytes for comment to set.
     * @return This builder for chaining.
     */
    public Builder setCommentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      comment_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object nonCompliantOrgPolicy_ = "";

    /**
     * <pre>
     * Optional. This field is deprecated and will be removed in future version of the API.
     * Name of the OrgPolicy which was modified with non-compliant change and
     * resulted in this violation.
     * Format:
     * projects/{project_number}/policies/{constraint_name}
     * folders/{folder_id}/policies/{constraint_name}
     * organizations/{organization_id}/policies/{constraint_name}
     * </pre>
     *
     * <code>
     * string non_compliant_org_policy = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @deprecated google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest.non_compliant_org_policy
     *     is deprecated. See google/cloud/assuredworkloads/v1/assuredworkloads.proto;l=584
     * @return The nonCompliantOrgPolicy.
     */
    @java.lang.Deprecated
    public java.lang.String getNonCompliantOrgPolicy() {
      java.lang.Object ref = nonCompliantOrgPolicy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nonCompliantOrgPolicy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * UTF-8 bytes of {@link #getNonCompliantOrgPolicy()}.
     *
     * @deprecated See {@link #getNonCompliantOrgPolicy()}.
     * @return The bytes for nonCompliantOrgPolicy.
     */
    @java.lang.Deprecated
    public com.google.protobuf.ByteString getNonCompliantOrgPolicyBytes() {
      java.lang.Object ref = nonCompliantOrgPolicy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nonCompliantOrgPolicy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the deprecated non-compliant OrgPolicy name (field 3).
     *
     * @deprecated See {@link #getNonCompliantOrgPolicy()}.
     * @param value The nonCompliantOrgPolicy to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setNonCompliantOrgPolicy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nonCompliantOrgPolicy_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Resets nonCompliantOrgPolicy to its default (empty string).
     *
     * @deprecated See {@link #getNonCompliantOrgPolicy()}.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearNonCompliantOrgPolicy() {
      nonCompliantOrgPolicy_ = getDefaultInstance().getNonCompliantOrgPolicy();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     * Sets nonCompliantOrgPolicy from raw UTF-8 bytes (validated).
     *
     * @deprecated See {@link #getNonCompliantOrgPolicy()}.
     * @param value The bytes for nonCompliantOrgPolicy to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setNonCompliantOrgPolicyBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nonCompliantOrgPolicy_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest)
  private static final com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest();
  }

  public static com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom; partial messages are attached to
  // thrown InvalidProtocolBufferExceptions for diagnostics.
  private static final com.google.protobuf.Parser<AcknowledgeViolationRequest> PARSER =
      new com.google.protobuf.AbstractParser<AcknowledgeViolationRequest>() {
        @java.lang.Override
        public AcknowledgeViolationRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<AcknowledgeViolationRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<AcknowledgeViolationRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.assuredworkloads.v1.AcknowledgeViolationRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/hudi
37,987
hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieClusteringConfig.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hudi.config; import org.apache.hudi.common.config.ConfigClassProperty; import org.apache.hudi.common.config.ConfigGroups; import org.apache.hudi.common.config.ConfigProperty; import org.apache.hudi.common.config.EnumDescription; import org.apache.hudi.common.config.EnumFieldDescription; import org.apache.hudi.common.config.HoodieConfig; import org.apache.hudi.common.config.TypedProperties; import org.apache.hudi.common.engine.EngineType; import org.apache.hudi.common.util.ValidationUtils; import org.apache.hudi.exception.HoodieNotSupportedException; import org.apache.hudi.index.HoodieIndex; import org.apache.hudi.table.action.cluster.ClusteringPlanPartitionFilterMode; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.Properties; /** * Clustering specific configs. 
*/ @ConfigClassProperty(name = "Clustering Configs", groupName = ConfigGroups.Names.WRITE_CLIENT, description = "Configurations that control the clustering table service in hudi, " + "which optimizes the storage layout for better query performance by sorting and sizing data files.") public class HoodieClusteringConfig extends HoodieConfig { // Any strategy specific params can be saved with this prefix public static final String CLUSTERING_STRATEGY_PARAM_PREFIX = "hoodie.clustering.plan.strategy."; public static final String SPARK_SIZED_BASED_CLUSTERING_PLAN_STRATEGY = "org.apache.hudi.client.clustering.plan.strategy.SparkSizeBasedClusteringPlanStrategy"; public static final String FLINK_SIZED_BASED_CLUSTERING_PLAN_STRATEGY = "org.apache.hudi.client.clustering.plan.strategy.FlinkSizeBasedClusteringPlanStrategy"; public static final String FLINK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY = "org.apache.hudi.client.clustering.plan.strategy.FlinkConsistentBucketClusteringPlanStrategy"; public static final String SPARK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY = "org.apache.hudi.client.clustering.plan.strategy.SparkConsistentBucketClusteringPlanStrategy"; public static final String JAVA_SIZED_BASED_CLUSTERING_PLAN_STRATEGY = "org.apache.hudi.client.clustering.plan.strategy.JavaSizeBasedClusteringPlanStrategy"; public static final String SPARK_STREAM_COPY_CLUSTERING_PLAN_STRATEGY = "org.apache.hudi.client.clustering.plan.strategy.SparkStreamCopyClusteringPlanStrategy"; public static final String SPARK_SORT_AND_SIZE_EXECUTION_STRATEGY = "org.apache.hudi.client.clustering.run.strategy.SparkSortAndSizeExecutionStrategy"; public static final String SPARK_STREAM_COPY_CLUSTERING_EXECUTION_STRATEGY = "org.apache.hudi.client.clustering.run.strategy.SparkStreamCopyClusteringExecutionStrategy"; public static final String SPARK_CONSISTENT_BUCKET_EXECUTION_STRATEGY = "org.apache.hudi.client.clustering.run.strategy.SparkConsistentBucketClusteringExecutionStrategy"; public static 
final String SINGLE_SPARK_JOB_CONSISTENT_HASHING_EXECUTION_STRATEGY = "org.apache.hudi.client.clustering.run.strategy.SingleSparkJobConsistentHashingExecutionStrategy"; public static final String JAVA_SORT_AND_SIZE_EXECUTION_STRATEGY = "org.apache.hudi.client.clustering.run.strategy.JavaSortAndSizeExecutionStrategy"; public static final String PLAN_PARTITION_FILTER_MODE = "hoodie.clustering.plan.partition.filter.mode"; // Any Space-filling curves optimize(z-order/hilbert) params can be saved with this prefix private static final String LAYOUT_OPTIMIZE_PARAM_PREFIX = "hoodie.layout.optimize."; public static final ConfigProperty<String> DAYBASED_LOOKBACK_PARTITIONS = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "daybased.lookback.partitions") .defaultValue("2") .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Number of partitions to list to create ClusteringPlan"); public static final ConfigProperty<String> PARTITION_FILTER_BEGIN_PARTITION = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "cluster.begin.partition") .noDefaultValue() .markAdvanced() .sinceVersion("0.11.0") .withDocumentation("Begin partition used to filter partition (inclusive), only effective when the filter mode '" + PLAN_PARTITION_FILTER_MODE + "' is " + ClusteringPlanPartitionFilterMode.SELECTED_PARTITIONS.name()); public static final ConfigProperty<String> PARTITION_FILTER_END_PARTITION = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "cluster.end.partition") .noDefaultValue() .markAdvanced() .sinceVersion("0.11.0") .withDocumentation("End partition used to filter partition (inclusive), only effective when the filter mode '" + PLAN_PARTITION_FILTER_MODE + "' is " + ClusteringPlanPartitionFilterMode.SELECTED_PARTITIONS.name()); public static final ConfigProperty<String> PLAN_STRATEGY_SMALL_FILE_LIMIT = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "small.file.limit") .defaultValue(String.valueOf(300 * 1024 * 1024L)) .sinceVersion("0.7.0") 
.withDocumentation("Files smaller than the size in bytes specified here are candidates for clustering"); public static final ConfigProperty<String> PARTITION_REGEX_PATTERN = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "partition.regex.pattern") .noDefaultValue() .markAdvanced() .sinceVersion("0.11.0") .withDocumentation("Filter clustering partitions that matched regex pattern"); public static final ConfigProperty<String> PARTITION_SELECTED = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "partition.selected") .noDefaultValue() .markAdvanced() .sinceVersion("0.11.0") .withDocumentation("Partitions to run clustering"); public static final ConfigProperty<String> PLAN_STRATEGY_CLASS_NAME = ConfigProperty .key("hoodie.clustering.plan.strategy.class") .defaultValue(SPARK_SIZED_BASED_CLUSTERING_PLAN_STRATEGY) .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Config to provide a strategy class (subclass of ClusteringPlanStrategy) to create clustering plan " + "i.e select what file groups are being clustered. Default strategy, looks at the clustering small file size limit (determined by " + PLAN_STRATEGY_SMALL_FILE_LIMIT.key() + ") to pick the small file slices within partitions for clustering."); public static final ConfigProperty<String> EXECUTION_STRATEGY_CLASS_NAME = ConfigProperty .key("hoodie.clustering.execution.strategy.class") .defaultValue(SPARK_SORT_AND_SIZE_EXECUTION_STRATEGY) .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Config to provide a strategy class (subclass of RunClusteringStrategy) to define how the " + " clustering plan is executed. 
By default, we sort the file groups in th plan by the specified columns, while " + " meeting the configured target file sizes."); public static final ConfigProperty<String> INLINE_CLUSTERING = ConfigProperty .key("hoodie.clustering.inline") .defaultValue("false") .sinceVersion("0.7.0") .withDocumentation("Turn on inline clustering - clustering will be run after each write operation is complete") .withAlternatives("hoodie.datasource.clustering.inline.enable"); public static final ConfigProperty<String> INLINE_CLUSTERING_MAX_COMMITS = ConfigProperty .key("hoodie.clustering.inline.max.commits") .defaultValue("4") .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Config to control frequency of clustering planning"); public static final ConfigProperty<String> ASYNC_CLUSTERING_MAX_COMMITS = ConfigProperty .key("hoodie.clustering.async.max.commits") .defaultValue("4") .markAdvanced() .sinceVersion("0.9.0") .withDocumentation("Config to control frequency of async clustering"); public static final ConfigProperty<Integer> CLUSTERING_MAX_PARALLELISM = ConfigProperty .key("hoodie.clustering.max.parallelism") .defaultValue(15) .markAdvanced() .sinceVersion("0.14.0") .withDocumentation("Maximum number of parallelism jobs submitted in clustering operation. 
" + "If the resource is sufficient(Like Spark engine has enough idle executors), increasing this " + "value will let the clustering job run faster, while it will give additional pressure to the " + "execution engines to manage more concurrent running jobs."); public static final ConfigProperty<Integer> CLUSTERING_GROUP_READ_PARALLELISM = ConfigProperty .key("hoodie.clustering.group.read.parallelism") .defaultValue(20) .markAdvanced() .sinceVersion("1.0.0") .withDocumentation("Maximum number of parallelism when Spark read records from clustering group."); public static final ConfigProperty<String> PLAN_STRATEGY_SKIP_PARTITIONS_FROM_LATEST = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "daybased.skipfromlatest.partitions") .defaultValue("0") .markAdvanced() .sinceVersion("0.9.0") .withDocumentation("Number of partitions to skip from latest when choosing partitions to create ClusteringPlan"); public static final ConfigProperty<ClusteringPlanPartitionFilterMode> PLAN_PARTITION_FILTER_MODE_NAME = ConfigProperty .key(PLAN_PARTITION_FILTER_MODE) .defaultValue(ClusteringPlanPartitionFilterMode.NONE) .markAdvanced() .sinceVersion("0.11.0") .withDocumentation(ClusteringPlanPartitionFilterMode.class); public static final ConfigProperty<String> PLAN_STRATEGY_MAX_BYTES_PER_OUTPUT_FILEGROUP = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "max.bytes.per.group") .defaultValue(String.valueOf(2 * 1024 * 1024 * 1024L)) .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Each clustering operation can create multiple output file groups. Total amount of data processed by clustering operation" + " is defined by below two properties (CLUSTERING_MAX_BYTES_PER_GROUP * CLUSTERING_MAX_NUM_GROUPS)." 
+ " Max amount of data to be included in one group"); public static final ConfigProperty<String> PLAN_STRATEGY_MAX_GROUPS = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "max.num.groups") .defaultValue("30") .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Maximum number of groups to create as part of ClusteringPlan. Increasing groups will increase parallelism"); public static final ConfigProperty<String> PLAN_STRATEGY_TARGET_FILE_MAX_BYTES = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "target.file.max.bytes") .defaultValue(String.valueOf(1024 * 1024 * 1024L)) .sinceVersion("0.7.0") .withDocumentation("Each group can produce 'N' (CLUSTERING_MAX_GROUP_SIZE/CLUSTERING_TARGET_FILE_SIZE) output file groups"); public static final ConfigProperty<Boolean> PLAN_STRATEGY_SINGLE_GROUP_CLUSTERING_ENABLED = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "single.group.clustering.enabled") .defaultValue(true) .markAdvanced() .sinceVersion("0.14.0") .withDocumentation("Whether to generate clustering plan when there is only one file group involved, by default true"); public static final ConfigProperty<String> PLAN_STRATEGY_SORT_COLUMNS = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "sort.columns") .noDefaultValue() .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Columns to sort the data by when clustering"); public static final ConfigProperty<String> UPDATES_STRATEGY = ConfigProperty .key("hoodie.clustering.updates.strategy") .defaultValue("org.apache.hudi.client.clustering.update.strategy.SparkRejectUpdateStrategy") .markAdvanced() .sinceVersion("0.7.0") .withDocumentation("Determines how to handle updates, deletes to file groups that are under clustering." 
+ " Default strategy just rejects the update"); public static final ConfigProperty<String> SCHEDULE_INLINE_CLUSTERING = ConfigProperty .key("hoodie.clustering.schedule.inline") .defaultValue("false") .markAdvanced() .withDocumentation("When set to true, clustering service will be attempted for inline scheduling after each write. Users have to ensure " + "they have a separate job to run async clustering(execution) for the one scheduled by this writer. Users can choose to set both " + "`hoodie.clustering.inline` and `hoodie.clustering.schedule.inline` to false and have both scheduling and execution triggered by any async process, on which " + "case `hoodie.clustering.async.enabled` is expected to be set to true. But if `hoodie.clustering.inline` is set to false, and `hoodie.clustering.schedule.inline` " + "is set to true, regular writers will schedule clustering inline, but users are expected to trigger async job for execution. If `hoodie.clustering.inline` is set " + "to true, regular writers will do both scheduling and execution inline for clustering"); public static final ConfigProperty<String> ASYNC_CLUSTERING_ENABLE = ConfigProperty .key("hoodie.clustering.async.enabled") .defaultValue("false") .sinceVersion("0.7.0") .withDocumentation("Enable running of clustering service, asynchronously as inserts happen on the table.") .withAlternatives("hoodie.datasource.clustering.async.enable"); /** * @deprecated this setting has no effect. Please refer to clustering configuration, as well as * {@link #LAYOUT_OPTIMIZE_STRATEGY} config to enable advanced record layout optimization strategies */ public static final ConfigProperty LAYOUT_OPTIMIZE_ENABLE = ConfigProperty .key(LAYOUT_OPTIMIZE_PARAM_PREFIX + "enable") .defaultValue(false) .markAdvanced() .sinceVersion("0.10.0") .deprecatedAfter("0.11.0") .withDocumentation("This setting has no effect. 
Please refer to clustering configuration, as well as " + "LAYOUT_OPTIMIZE_STRATEGY config to enable advanced record layout optimization strategies"); /** * Determines ordering strategy in for records layout optimization. * Currently, following strategies are supported * <ul> * <li>Linear: simply orders records lexicographically</li> * <li>Z-order: orders records along Z-order spatial-curve</li> * <li>Hilbert: orders records along Hilbert's spatial-curve</li> * </ul> * * NOTE: "z-order", "hilbert" strategies may consume considerably more compute, than "linear". * Make sure to perform small-scale local testing for your dataset before applying globally. */ public static final ConfigProperty<String> LAYOUT_OPTIMIZE_STRATEGY = ConfigProperty .key(LAYOUT_OPTIMIZE_PARAM_PREFIX + "strategy") .defaultValue(LayoutOptimizationStrategy.LINEAR.name()) .markAdvanced() .sinceVersion("0.10.0") .withDocumentation(LayoutOptimizationStrategy.class); /** * NOTE: This setting only has effect if {@link #LAYOUT_OPTIMIZE_STRATEGY} value is set to * either "z-order" or "hilbert" (ie leveraging space-filling curves) * * Currently, two methods to order records along the curve are supported "build" and "sample": * * <ul> * <li>Direct: entails that spatial curve will be built in full, "filling in" all of the individual * points corresponding to each individual record</li> * <li>Sample: leverages boundary-base interleaved index method (described in more details in * Amazon DynamoDB blog [1])</li> * </ul> * * NOTE: Boundary-based interleaved Index method has better generalization, * but is slower than direct method. * * Please refer to RFC-28 for specific elaboration on both flows. 
* * [1] https://aws.amazon.com/cn/blogs/database/tag/z-order/ */ public static final ConfigProperty<String> LAYOUT_OPTIMIZE_SPATIAL_CURVE_BUILD_METHOD = ConfigProperty .key(LAYOUT_OPTIMIZE_PARAM_PREFIX + "curve.build.method") .defaultValue(SpatialCurveCompositionStrategyType.DIRECT.name()) .markAdvanced() .sinceVersion("0.10.0") .withDocumentation(SpatialCurveCompositionStrategyType.class); /** * NOTE: This setting only has effect if {@link #LAYOUT_OPTIMIZE_SPATIAL_CURVE_BUILD_METHOD} value * is set to "sample" * * Determines target sample size used by the Boundary-based Interleaved Index method. * Larger sample size entails better layout optimization outcomes, at the expense of higher memory * footprint. */ public static final ConfigProperty<String> LAYOUT_OPTIMIZE_BUILD_CURVE_SAMPLE_SIZE = ConfigProperty .key(LAYOUT_OPTIMIZE_PARAM_PREFIX + "build.curve.sample.size") .defaultValue("200000") .markAdvanced() .sinceVersion("0.10.0") .withDocumentation("Determines target sample size used by the Boundary-based Interleaved Index method " + "of building space-filling curve. Larger sample size entails better layout optimization outcomes, " + "at the expense of higher memory footprint."); /** * @deprecated this setting has no effect */ public static final ConfigProperty LAYOUT_OPTIMIZE_DATA_SKIPPING_ENABLE = ConfigProperty .key(LAYOUT_OPTIMIZE_PARAM_PREFIX + "data.skipping.enable") .defaultValue(true) .markAdvanced() .sinceVersion("0.10.0") .deprecatedAfter("0.11.0") .withDocumentation("Enable data skipping by collecting statistics once layout optimization is complete."); public static final ConfigProperty<Boolean> ROLLBACK_PENDING_CLUSTERING_ON_CONFLICT = ConfigProperty .key("hoodie.clustering.rollback.pending.replacecommit.on.conflict") .defaultValue(false) .markAdvanced() .sinceVersion("0.10.0") .withDocumentation("If updates are allowed to file groups pending clustering, then set this config to rollback failed or pending clustering instants. 
" + "Pending clustering will be rolled back ONLY IF there is conflict between incoming upsert and filegroup to be clustered. " + "Please exercise caution while setting this config, especially when clustering is done very frequently. This could lead to race condition in " + "rare scenarios, for example, when the clustering completes after instants are fetched but before rollback completed."); public static final ConfigProperty<Boolean> FILE_STITCHING_BINARY_COPY_SCHEMA_EVOLUTION_ENABLE = ConfigProperty .key(CLUSTERING_STRATEGY_PARAM_PREFIX + "binary.copy.schema.evolution.enable") .defaultValue(false) .markAdvanced() .sinceVersion("1.1.0") .withDocumentation("Enable schema evolution support for binary file stitching during clustering. " + "When enabled, allows clustering of files with different but compatible schemas (e.g., files with added columns). " + "When disabled (default), only files with identical schemas will be clustered together, providing better performance " + "but requiring schema consistency across all files in a clustering group."); /** * @deprecated Use {@link #PLAN_STRATEGY_CLASS_NAME} and its methods instead */ @Deprecated public static final String CLUSTERING_PLAN_STRATEGY_CLASS = PLAN_STRATEGY_CLASS_NAME.key(); /** * @deprecated Use {@link #PLAN_STRATEGY_CLASS_NAME} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_PLAN_STRATEGY_CLASS = PLAN_STRATEGY_CLASS_NAME.defaultValue(); /** * @deprecated Use {@link #EXECUTION_STRATEGY_CLASS_NAME} and its methods instead */ @Deprecated public static final String CLUSTERING_EXECUTION_STRATEGY_CLASS = EXECUTION_STRATEGY_CLASS_NAME.key(); /** * @deprecated Use {@link #EXECUTION_STRATEGY_CLASS_NAME} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_EXECUTION_STRATEGY_CLASS = EXECUTION_STRATEGY_CLASS_NAME.defaultValue(); /** * @deprecated Use {@link #INLINE_CLUSTERING} and its methods instead */ @Deprecated public static final String 
INLINE_CLUSTERING_PROP = INLINE_CLUSTERING.key(); /** * @deprecated Use {@link #INLINE_CLUSTERING} and its methods instead */ @Deprecated private static final String DEFAULT_INLINE_CLUSTERING = INLINE_CLUSTERING.defaultValue(); /** * @deprecated Use {@link #INLINE_CLUSTERING_MAX_COMMITS} and its methods instead */ @Deprecated public static final String INLINE_CLUSTERING_MAX_COMMIT_PROP = INLINE_CLUSTERING_MAX_COMMITS.key(); /** * @deprecated Use {@link #INLINE_CLUSTERING_MAX_COMMITS} and its methods instead */ @Deprecated private static final String DEFAULT_INLINE_CLUSTERING_NUM_COMMITS = INLINE_CLUSTERING_MAX_COMMITS.defaultValue(); /** * @deprecated Use {@link #DAYBASED_LOOKBACK_PARTITIONS} and its methods instead */ @Deprecated public static final String CLUSTERING_TARGET_PARTITIONS = DAYBASED_LOOKBACK_PARTITIONS.key(); /** * @deprecated Use {@link #DAYBASED_LOOKBACK_PARTITIONS} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_TARGET_PARTITIONS = DAYBASED_LOOKBACK_PARTITIONS.defaultValue(); /** * @deprecated Use {@link #PLAN_STRATEGY_SMALL_FILE_LIMIT} and its methods instead */ @Deprecated public static final String CLUSTERING_PLAN_SMALL_FILE_LIMIT = PLAN_STRATEGY_SMALL_FILE_LIMIT.key(); /** * @deprecated Use {@link #PLAN_STRATEGY_SMALL_FILE_LIMIT} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_PLAN_SMALL_FILE_LIMIT = PLAN_STRATEGY_SMALL_FILE_LIMIT.defaultValue(); /** * @deprecated Use {@link #PLAN_STRATEGY_MAX_BYTES_PER_OUTPUT_FILEGROUP} and its methods instead */ @Deprecated public static final String CLUSTERING_MAX_BYTES_PER_GROUP = PLAN_STRATEGY_MAX_BYTES_PER_OUTPUT_FILEGROUP.key(); /** * @deprecated Use {@link #PLAN_STRATEGY_MAX_BYTES_PER_OUTPUT_FILEGROUP} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_MAX_GROUP_SIZE = PLAN_STRATEGY_MAX_BYTES_PER_OUTPUT_FILEGROUP.defaultValue(); /** * @deprecated Use {@link #PLAN_STRATEGY_MAX_GROUPS} and its 
methods instead */ @Deprecated public static final String CLUSTERING_MAX_NUM_GROUPS = PLAN_STRATEGY_MAX_GROUPS.key(); /** * @deprecated Use {@link #PLAN_STRATEGY_MAX_GROUPS} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_MAX_NUM_GROUPS = PLAN_STRATEGY_MAX_GROUPS.defaultValue(); /** * @deprecated Use {@link #PLAN_STRATEGY_TARGET_FILE_MAX_BYTES} and its methods instead */ @Deprecated public static final String CLUSTERING_TARGET_FILE_MAX_BYTES = PLAN_STRATEGY_TARGET_FILE_MAX_BYTES.key(); /** * @deprecated Use {@link #PLAN_STRATEGY_TARGET_FILE_MAX_BYTES} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_TARGET_FILE_MAX_BYTES = PLAN_STRATEGY_TARGET_FILE_MAX_BYTES.defaultValue(); /** * @deprecated Use {@link #PLAN_STRATEGY_SORT_COLUMNS} and its methods instead */ @Deprecated public static final String CLUSTERING_SORT_COLUMNS_PROPERTY = PLAN_STRATEGY_SORT_COLUMNS.key(); /** * @deprecated Use {@link #UPDATES_STRATEGY} and its methods instead */ @Deprecated public static final String CLUSTERING_UPDATES_STRATEGY_PROP = UPDATES_STRATEGY.key(); /** * @deprecated Use {@link #UPDATES_STRATEGY} and its methods instead */ @Deprecated public static final String DEFAULT_CLUSTERING_UPDATES_STRATEGY = UPDATES_STRATEGY.defaultValue(); /** * @deprecated Use {@link #ASYNC_CLUSTERING_ENABLE} and its methods instead */ @Deprecated public static final String ASYNC_CLUSTERING_ENABLE_OPT_KEY = ASYNC_CLUSTERING_ENABLE.key(); /** @deprecated Use {@link #ASYNC_CLUSTERING_ENABLE} and its methods instead */ @Deprecated public static final String DEFAULT_ASYNC_CLUSTERING_ENABLE_OPT_VAL = ASYNC_CLUSTERING_ENABLE.defaultValue(); // NOTE: This ctor is required for appropriate deserialization public HoodieClusteringConfig() { super(); } public boolean isAsyncClusteringEnabled() { return getBooleanOrDefault(HoodieClusteringConfig.ASYNC_CLUSTERING_ENABLE); } public boolean isInlineClusteringEnabled() { return 
getBooleanOrDefault(HoodieClusteringConfig.INLINE_CLUSTERING); } public static HoodieClusteringConfig from(TypedProperties props) { return HoodieClusteringConfig.newBuilder().fromProperties(props).build(); } public static Builder newBuilder() { return new Builder(); } public static class Builder { private final HoodieClusteringConfig clusteringConfig = new HoodieClusteringConfig(); private EngineType engineType = EngineType.SPARK; public Builder withEngineType(EngineType engineType) { this.engineType = engineType; return this; } public Builder fromFile(File propertiesFile) throws IOException { try (FileReader reader = new FileReader(propertiesFile)) { this.clusteringConfig.getProps().load(reader); return this; } } public Builder withClusteringPlanStrategyClass(String clusteringStrategyClass) { clusteringConfig.setValue(PLAN_STRATEGY_CLASS_NAME, clusteringStrategyClass); return this; } public Builder withSingleGroupClusteringEnabled(Boolean enabled) { clusteringConfig.setValue(PLAN_STRATEGY_SINGLE_GROUP_CLUSTERING_ENABLED, String.valueOf(enabled)); return this; } public Builder withClusteringPlanPartitionFilterMode(ClusteringPlanPartitionFilterMode mode) { clusteringConfig.setValue(PLAN_PARTITION_FILTER_MODE_NAME.key(), mode.toString()); return this; } public Builder withClusteringExecutionStrategyClass(String runClusteringStrategyClass) { clusteringConfig.setValue(EXECUTION_STRATEGY_CLASS_NAME, runClusteringStrategyClass); return this; } public Builder withClusteringTargetPartitions(int clusteringTargetPartitions) { clusteringConfig.setValue(DAYBASED_LOOKBACK_PARTITIONS, String.valueOf(clusteringTargetPartitions)); return this; } public Builder withClusteringPartitionRegexPattern(String pattern) { clusteringConfig.setValue(PARTITION_REGEX_PATTERN, pattern); return this; } public Builder withClusteringPartitionSelected(String partitionSelected) { clusteringConfig.setValue(PARTITION_SELECTED, partitionSelected); return this; } public Builder 
withClusteringSkipPartitionsFromLatest(int clusteringSkipPartitionsFromLatest) { clusteringConfig.setValue(PLAN_STRATEGY_SKIP_PARTITIONS_FROM_LATEST, String.valueOf(clusteringSkipPartitionsFromLatest)); return this; } public Builder withClusteringPartitionFilterBeginPartition(String begin) { clusteringConfig.setValue(PARTITION_FILTER_BEGIN_PARTITION, begin); return this; } public Builder withClusteringPartitionFilterEndPartition(String end) { clusteringConfig.setValue(PARTITION_FILTER_END_PARTITION, end); return this; } public Builder withClusteringPlanSmallFileLimit(long clusteringSmallFileLimit) { clusteringConfig.setValue(PLAN_STRATEGY_SMALL_FILE_LIMIT, String.valueOf(clusteringSmallFileLimit)); return this; } public Builder withClusteringSortColumns(String sortColumns) { clusteringConfig.setValue(PLAN_STRATEGY_SORT_COLUMNS, sortColumns); return this; } public Builder withClusteringMaxBytesInGroup(long clusteringMaxGroupSize) { clusteringConfig.setValue(PLAN_STRATEGY_MAX_BYTES_PER_OUTPUT_FILEGROUP, String.valueOf(clusteringMaxGroupSize)); return this; } public Builder withClusteringMaxNumGroups(int maxNumGroups) { clusteringConfig.setValue(PLAN_STRATEGY_MAX_GROUPS, String.valueOf(maxNumGroups)); return this; } public Builder withClusteringTargetFileMaxBytes(long targetFileSize) { clusteringConfig.setValue(PLAN_STRATEGY_TARGET_FILE_MAX_BYTES, String.valueOf(targetFileSize)); return this; } public Builder withInlineClustering(Boolean inlineClustering) { clusteringConfig.setValue(INLINE_CLUSTERING, String.valueOf(inlineClustering)); return this; } public Builder withScheduleInlineClustering(Boolean scheduleInlineClustering) { clusteringConfig.setValue(SCHEDULE_INLINE_CLUSTERING, String.valueOf(scheduleInlineClustering)); return this; } public Builder withInlineClusteringNumCommits(int numCommits) { clusteringConfig.setValue(INLINE_CLUSTERING_MAX_COMMITS, String.valueOf(numCommits)); return this; } public Builder withAsyncClusteringMaxCommits(int numCommits) { 
clusteringConfig.setValue(ASYNC_CLUSTERING_MAX_COMMITS, String.valueOf(numCommits)); return this; } public Builder fromProperties(Properties props) { // TODO this should cherry-pick only clustering properties this.clusteringConfig.getProps().putAll(props); return this; } public Builder withClusteringUpdatesStrategy(String updatesStrategyClass) { clusteringConfig.setValue(UPDATES_STRATEGY, updatesStrategyClass); return this; } public Builder withAsyncClustering(Boolean asyncClustering) { clusteringConfig.setValue(ASYNC_CLUSTERING_ENABLE, String.valueOf(asyncClustering)); return this; } public Builder withRollbackPendingClustering(Boolean rollbackPendingClustering) { clusteringConfig.setValue(ROLLBACK_PENDING_CLUSTERING_ON_CONFLICT, String.valueOf(rollbackPendingClustering)); return this; } public Builder withFileStitchingBinaryCopySchemaEvolutionEnabled(Boolean enabled) { clusteringConfig.setValue(FILE_STITCHING_BINARY_COPY_SCHEMA_EVOLUTION_ENABLE, String.valueOf(enabled)); return this; } public Builder withDataOptimizeStrategy(String strategy) { clusteringConfig.setValue(LAYOUT_OPTIMIZE_STRATEGY, strategy); return this; } public Builder withDataOptimizeBuildCurveStrategy(String method) { clusteringConfig.setValue(LAYOUT_OPTIMIZE_SPATIAL_CURVE_BUILD_METHOD, method); return this; } public Builder withDataOptimizeBuildCurveSampleNumber(int sampleNumber) { clusteringConfig.setValue(LAYOUT_OPTIMIZE_BUILD_CURVE_SAMPLE_SIZE, String.valueOf(sampleNumber)); return this; } public HoodieClusteringConfig build() { setDefaults(); validate(); return clusteringConfig; } private void setDefaults() { clusteringConfig.setDefaultValue(PLAN_STRATEGY_CLASS_NAME, getDefaultPlanStrategyClassName(engineType)); clusteringConfig.setDefaultValue(EXECUTION_STRATEGY_CLASS_NAME, getDefaultExecutionStrategyClassName(engineType)); clusteringConfig.setDefaults(HoodieClusteringConfig.class.getName()); } private void validate() { boolean inlineCluster = 
clusteringConfig.getBoolean(HoodieClusteringConfig.INLINE_CLUSTERING); boolean inlineClusterSchedule = clusteringConfig.getBoolean(HoodieClusteringConfig.SCHEDULE_INLINE_CLUSTERING); ValidationUtils.checkArgument(!(inlineCluster && inlineClusterSchedule), String.format("Either of inline clustering (%s) or " + "schedule inline clustering (%s) can be enabled. Both can't be set to true at the same time. %s,%s", HoodieClusteringConfig.INLINE_CLUSTERING.key(), HoodieClusteringConfig.SCHEDULE_INLINE_CLUSTERING.key(), inlineCluster, inlineClusterSchedule)); if (isConsistentHashingBucketIndex()) { String planStrategy = clusteringConfig.getString(PLAN_STRATEGY_CLASS_NAME); if (engineType == EngineType.FLINK) { ValidationUtils.checkArgument(planStrategy.equalsIgnoreCase(FLINK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY), "Consistent hashing bucket index only supports clustering plan strategy : " + FLINK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY); } else { ValidationUtils.checkArgument( planStrategy.equalsIgnoreCase(SPARK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY), "Consistent hashing bucket index only supports clustering plan strategy : " + SPARK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY); String clusteringConfigString = clusteringConfig.getString(EXECUTION_STRATEGY_CLASS_NAME); ValidationUtils.checkArgument( clusteringConfigString.equals(SPARK_CONSISTENT_BUCKET_EXECUTION_STRATEGY) || clusteringConfigString.equals(SINGLE_SPARK_JOB_CONSISTENT_HASHING_EXECUTION_STRATEGY), "Consistent hashing bucket index only supports clustering execution strategy : " + SPARK_CONSISTENT_BUCKET_EXECUTION_STRATEGY + " or " + SINGLE_SPARK_JOB_CONSISTENT_HASHING_EXECUTION_STRATEGY); } } } private boolean isConsistentHashingBucketIndex() { return clusteringConfig.contains(HoodieIndexConfig.INDEX_TYPE.key()) && clusteringConfig.contains(HoodieIndexConfig.BUCKET_INDEX_ENGINE_TYPE.key()) && 
clusteringConfig.getString(HoodieIndexConfig.INDEX_TYPE.key()).equalsIgnoreCase(HoodieIndex.IndexType.BUCKET.name()) && clusteringConfig.getString(HoodieIndexConfig.BUCKET_INDEX_ENGINE_TYPE.key()).equalsIgnoreCase(HoodieIndex.BucketIndexEngineType.CONSISTENT_HASHING.name()); } private String getDefaultPlanStrategyClassName(EngineType engineType) { switch (engineType) { case SPARK: return isConsistentHashingBucketIndex() ? SPARK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY : SPARK_SIZED_BASED_CLUSTERING_PLAN_STRATEGY; case FLINK: return isConsistentHashingBucketIndex() ? FLINK_CONSISTENT_BUCKET_CLUSTERING_PLAN_STRATEGY : FLINK_SIZED_BASED_CLUSTERING_PLAN_STRATEGY; case JAVA: return JAVA_SIZED_BASED_CLUSTERING_PLAN_STRATEGY; default: throw new HoodieNotSupportedException("Unsupported engine " + engineType); } } private String getDefaultExecutionStrategyClassName(EngineType engineType) { switch (engineType) { case SPARK: return isConsistentHashingBucketIndex() ? SINGLE_SPARK_JOB_CONSISTENT_HASHING_EXECUTION_STRATEGY : SPARK_SORT_AND_SIZE_EXECUTION_STRATEGY; case FLINK: case JAVA: return JAVA_SORT_AND_SIZE_EXECUTION_STRATEGY; default: throw new HoodieNotSupportedException("Unsupported engine " + engineType); } } } /** * Type of strategy for building Z-order/Hilbert space-filling curves. */ @EnumDescription("This configuration only has effect if `hoodie.layout.optimize.strategy` is " + "set to either \"z-order\" or \"hilbert\" (i.e. leveraging space-filling curves). This " + "configuration controls the type of a strategy to use for building the space-filling " + "curves, tackling specifically how the Strings are ordered based on the curve. " + "Since we truncate the String to 8 bytes for ordering, there are two issues: (1) it " + "can lead to poor aggregation effect, (2) the truncation of String longer than 8 bytes " + "loses the precision, if the Strings are different but the 8-byte prefix is the same. 
" + "The boundary-based interleaved index method (\"SAMPLE\") has better generalization, " + "solving the two problems above, but is slower than direct method (\"DIRECT\"). " + "User should benchmark the write and query performance before tweaking this in " + "production, if this is actually a problem. Please refer to RFC-28 for more details.") public enum SpatialCurveCompositionStrategyType { @EnumFieldDescription("This strategy builds the spatial curve in full, filling in all of " + "the individual points corresponding to each individual record, which requires less " + "compute.") DIRECT, @EnumFieldDescription("This strategy leverages boundary-base interleaved index method " + "(described in more details in Amazon DynamoDB blog " + "https://aws.amazon.com/cn/blogs/database/tag/z-order/) and produces a better layout " + "compared to DIRECT strategy. It requires more compute and is slower.") SAMPLE } /** * Layout optimization strategies such as Z-order/Hilbert space-curves, etc */ @EnumDescription("Determines ordering strategy for records layout optimization.") public enum LayoutOptimizationStrategy { @EnumFieldDescription("Orders records lexicographically") LINEAR, @EnumFieldDescription("Orders records along Z-order spatial-curve.") ZORDER, @EnumFieldDescription("Orders records along Hilbert's spatial-curve.") HILBERT } public static LayoutOptimizationStrategy resolveLayoutOptimizationStrategy(String cfgVal) { if (cfgVal.equalsIgnoreCase("z-order")) { return LayoutOptimizationStrategy.ZORDER; } return LayoutOptimizationStrategy.valueOf(cfgVal.toUpperCase()); } }
apache/incubator-kie-drools
38,339
drools-model/drools-model-codegen/src/main/java/org/drools/model/codegen/execmodel/generator/visitor/accumulate/AccumulateVisitor.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.drools.model.codegen.execmodel.generator.visitor.accumulate; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; import com.github.javaparser.ast.Node; import com.github.javaparser.ast.NodeList; import com.github.javaparser.ast.body.Parameter; import com.github.javaparser.ast.expr.BinaryExpr; import com.github.javaparser.ast.expr.Expression; import com.github.javaparser.ast.expr.LambdaExpr; import com.github.javaparser.ast.expr.LiteralExpr; import com.github.javaparser.ast.expr.MethodCallExpr; import com.github.javaparser.ast.expr.MethodReferenceExpr; import com.github.javaparser.ast.expr.NameExpr; import com.github.javaparser.ast.expr.ObjectCreationExpr; import com.github.javaparser.ast.expr.UnaryExpr; import com.github.javaparser.ast.stmt.BlockStmt; import com.github.javaparser.ast.stmt.ExpressionStmt; import com.github.javaparser.ast.type.ClassOrInterfaceType; import org.drools.base.rule.Pattern; import org.drools.compiler.rule.builder.util.AccumulateUtil; import org.drools.core.base.accumulators.CollectAccumulator; import 
org.drools.core.base.accumulators.CollectListAccumulateFunction; import org.drools.core.base.accumulators.CollectSetAccumulateFunction; import org.drools.drl.ast.descr.AccumulateDescr; import org.drools.drl.ast.descr.AndDescr; import org.drools.drl.ast.descr.BaseDescr; import org.drools.drl.ast.descr.PatternDescr; import org.drools.model.codegen.execmodel.PackageModel; import org.drools.model.codegen.execmodel.errors.InvalidExpressionErrorResult; import org.drools.model.codegen.execmodel.generator.DeclarationSpec; import org.drools.model.codegen.execmodel.generator.DrlxParseUtil; import org.drools.model.codegen.execmodel.generator.RuleContext; import org.drools.model.codegen.execmodel.generator.TypedDeclarationSpec; import org.drools.model.codegen.execmodel.generator.TypedExpression; import org.drools.model.codegen.execmodel.generator.drlxparse.ConstraintParser; import org.drools.model.codegen.execmodel.generator.drlxparse.DrlxParseFail; import org.drools.model.codegen.execmodel.generator.drlxparse.DrlxParseResult; import org.drools.model.codegen.execmodel.generator.drlxparse.DrlxParseSuccess; import org.drools.model.codegen.execmodel.generator.drlxparse.ParseResultVisitor; import org.drools.model.codegen.execmodel.generator.drlxparse.SingleDrlxParseSuccess; import org.drools.model.codegen.execmodel.generator.expression.AbstractExpressionBuilder; import org.drools.model.codegen.execmodel.generator.expression.PatternExpressionBuilder; import org.drools.model.codegen.execmodel.generator.expressiontyper.ExpressionTyper; import org.drools.model.codegen.execmodel.generator.expressiontyper.ExpressionTyperContext; import org.drools.model.codegen.execmodel.generator.visitor.ModelGeneratorVisitor; import org.drools.model.codegen.execmodel.util.LambdaUtil; import org.drools.modelcompiler.constraints.GenericCollectAccumulator; import org.drools.mvel.parser.ast.expr.DrlNameExpr; import org.kie.api.runtime.rule.AccumulateFunction; import 
org.kie.internal.builder.conf.AccumulateFunctionOption; import static java.util.Optional.ofNullable; import static java.util.stream.Collectors.toList; import static org.drools.model.codegen.execmodel.generator.DrlxParseUtil.getLiteralExpressionType; import static org.drools.model.codegen.execmodel.generator.DrlxParseUtil.validateDuplicateBindings; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.ACCUMULATE_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.ACC_FUNCTION_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.AND_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.BIND_AS_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.BIND_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.REACT_ON_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.VALUE_OF_CALL; import static org.drools.model.codegen.execmodel.generator.DslMethodNames.createDslTopLevelMethod; import static org.drools.model.codegen.execmodel.generator.visitor.FromCollectVisitor.GENERIC_COLLECT; import static org.drools.model.codegen.execmodel.util.lambdareplace.ReplaceTypeInLambda.replaceTypeInExprLambdaAndIndex; import static org.drools.mvel.parser.printer.PrintUtil.printNode; public class AccumulateVisitor { protected final RuleContext context; private final PackageModel packageModel; private final ModelGeneratorVisitor modelGeneratorVisitor; private final AbstractExpressionBuilder expressionBuilder; public AccumulateVisitor(ModelGeneratorVisitor modelGeneratorVisitor, RuleContext context, PackageModel packageModel) { this.context = context; this.modelGeneratorVisitor = modelGeneratorVisitor; this.packageModel = packageModel; this.expressionBuilder = new PatternExpressionBuilder(context); } public void visit(AccumulateDescr descr, PatternDescr basePattern) { final MethodCallExpr accumulateDSL = 
createDslTopLevelMethod(ACCUMULATE_CALL); context.addExpression(accumulateDSL); final MethodCallExpr accumulateExprs = createDslTopLevelMethod(AND_CALL); accumulateDSL.addArgument(accumulateExprs); this.context.pushScope(descr); pushAccumulateContext( accumulateExprs ); try { Set<String> externalDeclrs = new HashSet<>( context.getAvailableBindings() ); BaseDescr input = descr.getInputPattern() == null ? descr.getInput() : descr.getInputPattern(); input.accept( modelGeneratorVisitor ); if ( accumulateExprs.getArguments().isEmpty() ) { accumulateDSL.remove( accumulateExprs ); } else if ( accumulateExprs.getArguments().size() == 1 ) { accumulateDSL.setArgument( 0, accumulateExprs.getArguments().get( 0 ) ); } if ( !descr.getFunctions().isEmpty() ) { if ( validateBindings(descr) ) { return; } processAccumulateFunctions( descr, basePattern, input, accumulateDSL ); } else if ( descr.getFunctions().isEmpty() && descr.getInitCode() != null ) { new AccumulateInlineVisitor( context, packageModel ).inlineAccumulate( descr, basePattern, accumulateDSL, externalDeclrs, input ); } else { throw new UnsupportedOperationException( "Unknown type of Accumulate." 
); } } finally { context.popExprPointer(); this.context.popScope(); } } protected void processAccumulateFunctions(AccumulateDescr descr, PatternDescr basePattern, BaseDescr input, MethodCallExpr accumulateDSL) { for (AccumulateDescr.AccumulateFunctionCallDescr function : descr.getFunctions()) { try { Optional<NewBinding> optNewBinding = visit(function, basePattern, input, accumulateDSL); processNewBinding(optNewBinding, accumulateDSL); } catch (AccumulateNonExistingFunction e) { addNonExistingFunctionError(context, e.function); return; } catch (AccumulateParsingFailedException e) { context.addCompilationError(new InvalidExpressionErrorResult(e.getMessage(), Optional.of(context.getRuleDescr()))); return; } } } private boolean validateBindings(AccumulateDescr descr) { final List<String> allBindings = descr .getFunctions() .stream() .map(AccumulateDescr.AccumulateFunctionCallDescr::getBind) .filter(Objects::nonNull) .collect(toList()); final Optional<InvalidExpressionErrorResult> invalidExpressionErrorResult = validateDuplicateBindings(context.getRuleName(), allBindings); invalidExpressionErrorResult.ifPresent(context::addCompilationError); return invalidExpressionErrorResult.isPresent(); } private Optional<NewBinding> visit(AccumulateDescr.AccumulateFunctionCallDescr function, PatternDescr basePattern, BaseDescr input, MethodCallExpr accumulateDSL) { context.pushExprPointer(accumulateDSL::addArgument); try { final MethodCallExpr functionDSL = createDslTopLevelMethod(ACC_FUNCTION_CALL ); final String optBindingId = ofNullable( function.getBind() ).orElse( basePattern.getIdentifier() ); final String bindingId = context.getOutOfScopeVar( ofNullable( optBindingId ).orElse( context.getOrCreateAccumulatorBindingId( function.getFunction() ) ) ); Optional<NewBinding> optNewBinding = Optional.empty(); if ( function.getParams().length == 0 ) { final AccumulateFunction optAccumulateFunction = getAccumulateFunction( function, Object.class ); zeroParameterFunction( basePattern, 
functionDSL, bindingId, optAccumulateFunction ); } else if (function.getParams().length == 1) { optNewBinding = parseFirstParameter( basePattern, input, function, functionDSL, bindingId ); } else { throw new AccumulateParsingFailedException( "Function \"" + function.getFunction() + "\" cannot have more than 1 parameter"); } if ( bindingId != null ) { final MethodCallExpr asDSL = new MethodCallExpr( functionDSL, BIND_AS_CALL ); asDSL.addArgument( context.getVarExpr( bindingId, DrlxParseUtil.toVar(bindingId) ) ); accumulateDSL.addArgument( asDSL ); } return optNewBinding; } finally { context.popExprPointer(); } } private Optional<NewBinding> parseFirstParameter(PatternDescr basePattern, BaseDescr input, AccumulateDescr.AccumulateFunctionCallDescr function, MethodCallExpr functionDSL, String bindingId) { final String accumulateFunctionParameterStr = function.getParams()[0]; final Expression accumulateFunctionParameter = DrlxParseUtil.parseExpression(accumulateFunctionParameterStr).getExpr(); if (accumulateFunctionParameter instanceof BinaryExpr || accumulateFunctionParameter instanceof UnaryExpr) { return bindingParameter(basePattern, function, functionDSL, bindingId, accumulateFunctionParameterStr); } if (parameterNeedsConvertionToMethodCallExpr(accumulateFunctionParameter)) { return methodCallExprParameter(basePattern, input, function, functionDSL, bindingId, accumulateFunctionParameter); } if (accumulateFunctionParameter instanceof DrlNameExpr) { nameExprParameter(basePattern, function, functionDSL, bindingId, accumulateFunctionParameter); } else if (accumulateFunctionParameter instanceof LiteralExpr) { literalExprParameter(basePattern, function, functionDSL, bindingId, accumulateFunctionParameter); } else { throw new AccumulateParsingFailedException( "The expression \"" + accumulateFunctionParameterStr + "\" in function \"" + function.getFunction() + "\" of type \"" + accumulateFunctionParameter.getClass().getSimpleName() + "\" is not managed in " + 
this.getClass().getSimpleName()); } return Optional.empty(); } private void literalExprParameter(PatternDescr basePattern, AccumulateDescr.AccumulateFunctionCallDescr function, MethodCallExpr functionDSL, String bindingId, Expression accumulateFunctionParameter) { final Class<?> declarationClass = getLiteralExpressionType((LiteralExpr) accumulateFunctionParameter); AccumulateFunction accumulateFunction = getAccumulateFunction(function, declarationClass); validateAccFunctionTypeAgainstPatternType(context, basePattern, accumulateFunction); functionDSL.addArgument(createAccSupplierExpr(accumulateFunction)); functionDSL.addArgument(createDslTopLevelMethod(VALUE_OF_CALL, NodeList.nodeList(accumulateFunctionParameter))); addBindingAsDeclaration(context, bindingId, accumulateFunction.getResultType()); } private void nameExprParameter(PatternDescr basePattern, AccumulateDescr.AccumulateFunctionCallDescr function, MethodCallExpr functionDSL, String bindingId, Expression accumulateFunctionParameter) { String nameExpr = ((DrlNameExpr) accumulateFunctionParameter).getName().asString(); Optional<DeclarationSpec> declaration = context.getTypedDeclarationById(nameExpr).map(DeclarationSpec.class::cast); if (declaration.isEmpty()) { String name = nameExpr; declaration = context.getAllDeclarations().stream().filter( d -> d.getVariableName().map( n -> n.equals( name ) ).orElse( false ) ).findFirst(); if ( declaration.isPresent() ) { nameExpr = declaration.get().getBindingId(); } else { throw new RuntimeException("Unknown parameter in accumulate: " + name); } } DeclarationSpec decSpec = declaration.get(); String accumulateFunctionName = AccumulateUtil.getFunctionName(() -> decSpec.getDeclarationClass(), function.getFunction()); if (GENERIC_COLLECT.equals(accumulateFunctionName)) { String collectorType = basePattern.getObjectType(); MethodReferenceExpr collectorSupplierExpr = new MethodReferenceExpr(new NameExpr(collectorType), new NodeList<>(), "new"); ObjectCreationExpr 
accumulatorConstructor = new ObjectCreationExpr(null, new ClassOrInterfaceType(null, GenericCollectAccumulator.class.getCanonicalName()), NodeList.nodeList(collectorSupplierExpr)); functionDSL.addArgument(new LambdaExpr(new NodeList<>(), accumulatorConstructor)); functionDSL.addArgument(context.getVarExpr(nameExpr)); return; } AccumulateFunction accumulateFunction = getAccumulateFunction(accumulateFunctionName, function); validateAccFunctionTypeAgainstPatternType(context, basePattern, accumulateFunction); functionDSL.addArgument(createAccSupplierExpr(accumulateFunction)); functionDSL.addArgument(context.getVarExpr(nameExpr)); addBindingAsDeclaration(context, bindingId, accumulateFunction.getResultType()); } private Optional<NewBinding> methodCallExprParameter(PatternDescr basePattern, BaseDescr input, AccumulateDescr.AccumulateFunctionCallDescr function, MethodCallExpr functionDSL, String bindingId, Expression accumulateFunctionParameter) { final Expression parameterConverted = convertParameter(accumulateFunctionParameter); final DrlxParseUtil.RemoveRootNodeResult methodCallWithoutRootNode = DrlxParseUtil.removeRootNode(parameterConverted); String rootNodeName = getRootNodeName(methodCallWithoutRootNode); Optional<TypedDeclarationSpec> decl = context.getTypedDeclarationById(rootNodeName); Class<?> clazz = decl.<Class<?>>map(TypedDeclarationSpec::getDeclarationClass) .orElseGet( () -> { try { return context.getTypeResolver().resolveType(rootNodeName); } catch (ClassNotFoundException e) { throw new RuntimeException( e ); } } ); final ExpressionTyperContext expressionTyperContext = new ExpressionTyperContext(); final ExpressionTyper expressionTyper = new ExpressionTyper(context, clazz, bindingId, false, expressionTyperContext); TypedExpression typedExpression = expressionTyper .toTypedExpression(methodCallWithoutRootNode.getWithoutRootNode()) .typedExpressionOrException(); final Class<?> methodCallExprType = typedExpression.getRawClass(); final AccumulateFunction 
accumulateFunction = getAccumulateFunction(function, methodCallExprType); validateAccFunctionTypeAgainstPatternType(context, basePattern, accumulateFunction); functionDSL.addArgument(createAccSupplierExpr(accumulateFunction)); // Every expression in an accumulate function gets transformed in a bind expression with a generated id // Then the accumulate function will have that binding expression as a source final Class accumulateFunctionResultType = accumulateFunction.getResultType(); final String bindExpressionVariable = context.getExprId(accumulateFunctionResultType, typedExpression.toString()); String paramExprBindingId = rootNodeName; Class<?> patternType = clazz; PatternDescr inputPattern = decl.isPresent() ? null : findInputPattern(input); if (inputPattern != null) { String inputId = inputPattern.getIdentifier(); Optional<TypedDeclarationSpec> accumulateClassDeclOpt = context.getTypedDeclarationById(inputId); if (accumulateClassDeclOpt.isPresent()) { // when static method is used in accumulate function, "_this" is a pattern input // Note that DrlxParseUtil.generateLambdaWithoutParameters() takes the patternType as a class of "_this" paramExprBindingId = inputId; patternType = accumulateClassDeclOpt.get().getDeclarationClass(); } } SingleDrlxParseSuccess drlxParseResult = (SingleDrlxParseSuccess) ConstraintParser.defaultConstraintParser(context, context.getPackageModel()) .drlxParse(patternType, paramExprBindingId, printNode(parameterConverted)); if (inputPattern != null) { drlxParseResult.setAccumulateBinding( inputPattern.getIdentifier() ); } return drlxParseResult.acceptWithReturnValue(new ReplaceBindingVisitor(functionDSL, bindingId, methodCallExprType, accumulateFunctionResultType, bindExpressionVariable, drlxParseResult)); } private PatternDescr findInputPattern(BaseDescr input) { if ( input instanceof PatternDescr ) { return (PatternDescr) input; } if ( input instanceof AndDescr ) { List<BaseDescr> childDescrs = (( AndDescr ) input).getDescrs(); for (int 
i = childDescrs.size()-1; i >= 0; i--) { if ( childDescrs.get(i) instanceof PatternDescr ) { return (( PatternDescr ) childDescrs.get( i )); } } } return null; } private class ReplaceBindingVisitor implements ParseResultVisitor<Optional<NewBinding>> { private final MethodCallExpr functionDSL; private final String bindingId; private final Class<?> methodCallExprType; private final Class accumulateFunctionResultType; private final String bindExpressionVariable; private final SingleDrlxParseSuccess drlxParseResult; ReplaceBindingVisitor(MethodCallExpr functionDSL, String bindingId, Class<?> methodCallExprType, Class accumulateFunctionResultType, String bindExpressionVariable, SingleDrlxParseSuccess drlxParseResult) { this.functionDSL = functionDSL; this.bindingId = bindingId; this.methodCallExprType = methodCallExprType; this.accumulateFunctionResultType = accumulateFunctionResultType; this.bindExpressionVariable = bindExpressionVariable; this.drlxParseResult = drlxParseResult; } @Override public Optional<NewBinding> onSuccess(DrlxParseSuccess result) { SingleDrlxParseSuccess singleResult = drlxParseResult; singleResult.setExprBinding(bindExpressionVariable); final MethodCallExpr binding = expressionBuilder.buildBinding(singleResult); context.addDeclarationReplacing(new TypedDeclarationSpec(bindExpressionVariable, methodCallExprType)); functionDSL.addArgument(context.getVarExpr(bindExpressionVariable)); context.addDeclarationReplacing(new TypedDeclarationSpec(bindingId, accumulateFunctionResultType)); context.getExpressions().forEach(expression -> replaceTypeInExprLambdaAndIndex(bindingId, accumulateFunctionResultType, expression)); List<String> ids = new ArrayList<>(); if (singleResult.getPatternBinding() != null) { ids.add( singleResult.getPatternBinding() ); } return Optional.of(new NewBinding(ids, binding)); } @Override public Optional<NewBinding> onFail(DrlxParseFail failure) { return Optional.empty(); } } private Expression convertParameter(Expression 
accumulateFunctionParameter) { final Expression parameterConverted; if (accumulateFunctionParameter.isArrayAccessExpr()) { parameterConverted = new ExpressionTyper(context, Object.class, null, false) .toTypedExpression(accumulateFunctionParameter) .getTypedExpression().orElseThrow(() -> new RuntimeException("Cannot convert expression to method call" + accumulateFunctionParameter)) .getExpression(); } else if (accumulateFunctionParameter.isFieldAccessExpr()) { parameterConverted = new ExpressionTyper(context, Object.class, null, false) .toTypedExpression(accumulateFunctionParameter) .getTypedExpression().orElseThrow(() -> new RuntimeException("Cannot convert expression to method call" + accumulateFunctionParameter)) .getExpression(); } else { parameterConverted = accumulateFunctionParameter; } return parameterConverted; } private boolean parameterNeedsConvertionToMethodCallExpr(Expression accumulateFunctionParameter) { return accumulateFunctionParameter.isMethodCallExpr() || accumulateFunctionParameter.isArrayAccessExpr() || accumulateFunctionParameter.isFieldAccessExpr(); } private Optional<NewBinding> bindingParameter(PatternDescr basePattern, AccumulateDescr.AccumulateFunctionCallDescr function, MethodCallExpr functionDSL, String bindingId, String accumulateFunctionParameterStr) { final DrlxParseResult parseResult = ConstraintParser.defaultConstraintParser(context, packageModel).drlxParse(Object.class, bindingId, accumulateFunctionParameterStr); return parseResult.acceptWithReturnValue(new ParseResultVisitor<>() { @Override public Optional<NewBinding> onSuccess(DrlxParseSuccess drlxParseResult) { SingleDrlxParseSuccess singleResult = (SingleDrlxParseSuccess) drlxParseResult; Class<?> exprRawClass = singleResult.getExprRawClass(); AccumulateFunction accumulateFunction = getAccumulateFunction(function, exprRawClass); validateAccFunctionTypeAgainstPatternType(context, basePattern, accumulateFunction); final String bindExpressionVariable = 
context.getExprId(accumulateFunction.getResultType(), singleResult.getLeft().toString()); singleResult.setExprBinding(bindExpressionVariable); context.addDeclarationReplacing(new TypedDeclarationSpec(singleResult.getPatternBinding(), exprRawClass)); context.getExpressions().forEach(expression -> replaceTypeInExprLambdaAndIndex(bindingId, exprRawClass, expression)); functionDSL.addArgument(createAccSupplierExpr(accumulateFunction)); final MethodCallExpr newBindingFromBinary = AccumulateVisitor.this.buildBinding(bindExpressionVariable, singleResult.getUsedDeclarations(), singleResult.getExpr()); context.addDeclarationReplacing(new TypedDeclarationSpec(bindExpressionVariable, exprRawClass)); functionDSL.addArgument(context.getVarExpr(bindExpressionVariable)); return Optional.of(new NewBinding(Collections.emptyList(), newBindingFromBinary)); } @Override public Optional<NewBinding> onFail(DrlxParseFail failure) { return Optional.empty(); } }); } private void zeroParameterFunction(PatternDescr basePattern, MethodCallExpr functionDSL, String bindingId, AccumulateFunction accumulateFunction) { validateAccFunctionTypeAgainstPatternType(context, basePattern, accumulateFunction); functionDSL.addArgument(createAccSupplierExpr(accumulateFunction)); Class accumulateFunctionResultType = accumulateFunction.getResultType(); context.addDeclarationReplacing(new TypedDeclarationSpec(bindingId, accumulateFunctionResultType)); context.getExpressions().forEach(expression -> replaceTypeInExprLambdaAndIndex(bindingId, accumulateFunctionResultType, expression)); } private static MethodReferenceExpr createAccSupplierExpr(AccumulateFunction accumulateFunction) { NameExpr nameExpr = new NameExpr(accumulateFunction.getClass().getCanonicalName()); return new MethodReferenceExpr(nameExpr, new NodeList<>(), "new"); } private void validateAccFunctionTypeAgainstPatternType(RuleContext context, PatternDescr basePattern, AccumulateFunction accumulateFunction) { final String expectedResultTypeString = 
basePattern.getObjectType(); final Class<?> expectedResultType = DrlxParseUtil.getClassFromType(context.getTypeResolver(), DrlxParseUtil.toClassOrInterfaceType(expectedResultTypeString)); final Class actualResultType = accumulateFunction.getResultType(); final boolean isJavaDialect = context.getRuleDialect().equals(RuleContext.RuleDialect.JAVA); final boolean isQuery = context.isQuery(); final boolean isCollectFunction = isCollectFunction(accumulateFunction); final boolean isInsideAccumulate = ((AccumulateDescr) basePattern.getSource()).getInput() instanceof AndDescr; final boolean checkCollect = !isCollectFunction || isInsideAccumulate; if (!isQuery && checkCollect && isJavaDialect && !Pattern.isCompatibleWithAccumulateReturnType(expectedResultType, actualResultType)) { context.addCompilationError(new InvalidExpressionErrorResult( String.format( "Pattern of type: '[ClassObjectType class=%s]' " + "on rule '%s' " + "is not compatible with type %s returned by accumulate function." , expectedResultType.getCanonicalName(), context.getRuleName(), actualResultType.getCanonicalName()), Optional.of(context.getRuleDescr()))); } } private boolean isCollectFunction(AccumulateFunction accumulateFunction) { return accumulateFunction instanceof CollectListAccumulateFunction || accumulateFunction instanceof CollectSetAccumulateFunction || accumulateFunction instanceof CollectAccumulator; } private void addNonExistingFunctionError(RuleContext context, AccumulateDescr.AccumulateFunctionCallDescr function) { context.addCompilationError(new InvalidExpressionErrorResult(String.format("Unknown accumulate function: '%s' on rule '%s'.", function.getFunction(), context.getRuleDescr().getName()), Optional.of(context.getRuleDescr()))); } private void addBindingAsDeclaration(RuleContext context, String bindingId, Class accumulateFunctionResultType) { if (bindingId != null) { context.addDeclarationReplacing(new TypedDeclarationSpec(bindingId, accumulateFunctionResultType)); if 
(context.getExpressions().size() > 1) { // replace the type of the lambda with the one resulting from the accumulate operation only in the pattern immediately before it replaceTypeInExprLambdaAndIndex(bindingId, accumulateFunctionResultType, context.getExpressions().get(context.getExpressions().size()-2)); } } } private AccumulateFunction getAccumulateFunction(AccumulateDescr.AccumulateFunctionCallDescr function, Class<?> methodCallExprType) { final String accumulateFunctionName = AccumulateUtil.getFunctionName(() -> methodCallExprType, function.getFunction()); return getAccumulateFunction(accumulateFunctionName, function); } private AccumulateFunction getAccumulateFunction(String accumulateFunctionName, AccumulateDescr.AccumulateFunctionCallDescr function) { final Optional<AccumulateFunction> bundledAccumulateFunction = ofNullable(packageModel.getConfiguration().getOption(AccumulateFunctionOption.KEY, accumulateFunctionName).getFunction()); final Optional<AccumulateFunction> importedAccumulateFunction = ofNullable(packageModel.getAccumulateFunctions().get(accumulateFunctionName)); return bundledAccumulateFunction .map(Optional::of) .orElse(importedAccumulateFunction) .orElseThrow(() -> new AccumulateNonExistingFunction(function)); } private String getRootNodeName(DrlxParseUtil.RemoveRootNodeResult methodCallWithoutRootNode) { final Expression rootNode = methodCallWithoutRootNode.getRootNode().orElseThrow(UnsupportedOperationException::new); final String rootNodeName; if (rootNode instanceof NameExpr) { rootNodeName = ((NameExpr) rootNode).getName().asString(); } else { throw new AccumulateParsingFailedException("Root node of expression should be a declaration"); } return rootNodeName; } protected Expression buildConstraintExpression(Expression expr, Collection<String> usedDeclarations) { LambdaExpr lambdaExpr = new LambdaExpr(); lambdaExpr.setEnclosingParameters(true); usedDeclarations.stream().map(s -> new Parameter(context.getDelarationType(s), 
s)).forEach(lambdaExpr::addParameter); lambdaExpr.setBody(new ExpressionStmt(expr)); return lambdaExpr; } static List<String> collectNamesInBlock(BlockStmt block, RuleContext context) { return block.findAll(NameExpr.class, n -> { Optional<TypedDeclarationSpec> optD = context.getTypedDeclarationById(n.getNameAsString()); return optD.filter(d -> !d.isGlobal()).isPresent(); // Global aren't supported }) .stream() .map(NameExpr::getNameAsString) .distinct() .collect(toList()); } private void pushAccumulateContext( MethodCallExpr accumulateExprs ) { context.pushExprPointer(accumulateExprs::addArgument); } private MethodCallExpr buildBinding(String bindingName, Collection<String> usedDeclaration, Expression expression) { MethodCallExpr bindDSL = new MethodCallExpr(null, BIND_CALL); bindDSL.addArgument(context.getVar(bindingName)); usedDeclaration.stream().map(context::getVarExpr).forEach(bindDSL::addArgument); bindDSL.addArgument(buildConstraintExpression(expression, usedDeclaration)); return bindDSL; } private void processNewBinding(Optional<NewBinding> optNewBinding, MethodCallExpr accumulateDSL) { optNewBinding.ifPresent(newBinding -> { final List<Expression> allExpressions = context.getExpressions(); final MethodCallExpr newBindingExpression = newBinding.bindExpression; if (newBinding.patternBinding.size() == 1) { new PatternToReplace(context, newBinding.patternBinding).findFromPattern() .ifPresent(pattern -> addBindAsLastChainCall(newBindingExpression, pattern)); String binding = newBinding.patternBinding.iterator().next(); composeTwoBindings(binding, newBindingExpression); } else if (newBinding.patternBinding.size() == 2) { String binding = newBinding.patternBinding.iterator().next(); composeTwoBindings(binding, newBindingExpression); } else { final MethodCallExpr lastPattern = DrlxParseUtil.findLastPattern(allExpressions) .orElseThrow(() -> new RuntimeException("Need the last pattern to add the binding")); final MethodCallExpr replacedBinding = 
replaceBindingWithPatternBinding(newBindingExpression, lastPattern); addBindAsLastChainCall(replacedBinding, lastPattern); } }); } private void composeTwoBindings(String binding, MethodCallExpr newBindingExpression) { context.findBindingExpression(binding).ifPresent(oldBind -> { // compose newComposedBinding using oldBind and newBindingExpression. But still keep oldBind. LambdaExpr oldBindLambda = oldBind.findFirst(LambdaExpr.class).orElseThrow(RuntimeException::new); LambdaExpr newBindLambda = newBindingExpression.findFirst(LambdaExpr.class).orElseThrow(RuntimeException::new); LambdaExpr tmpOldBindLambda = oldBindLambda.clone(); Expression newComposedLambda = LambdaUtil.appendNewLambdaToOld(tmpOldBindLambda, newBindLambda); MethodCallExpr newComposedBinding = new MethodCallExpr(BIND_CALL, newBindingExpression.getArgument(0), newComposedLambda); newComposedBinding.setScope(oldBind.getScope().orElseThrow(RuntimeException::new)); Optional<MethodCallExpr> optReactOn = oldBind.getArguments().stream() .filter(MethodCallExpr.class::isInstance) .map(MethodCallExpr.class::cast) .filter(exp -> exp.getName().asString().equals(REACT_ON_CALL)) .findFirst(); if (optReactOn.isPresent()) { newComposedBinding.addArgument(optReactOn.get().clone()); } oldBind.setScope(newComposedBinding); // insert newComposedBinding at the first in the chain }); } private void addBindAsLastChainCall(MethodCallExpr newBindingExpression, MethodCallExpr pattern) { final Optional<Node> optParent = pattern.getParentNode(); newBindingExpression.setScope(pattern); optParent.ifPresent(parent -> { parent.replace(pattern, newBindingExpression); pattern.setParentNode( newBindingExpression ); }); } private MethodCallExpr replaceBindingWithPatternBinding(MethodCallExpr bindExpression, MethodCallExpr lastPattern) { // This method links a binding expression, used to evaluate the accumulated value, // to the last pattern in a multi-pattern accumulate like the following // // accumulate( $c : Child( age < 10 ) and 
$a : Adult( name == $c.parent ) and $s : String( this == $a.name ), // $sum : sum($a.getAge() + $c.getAge() + $s.length()) ) // // In the case the bindExpression, that will have to be linked to the $s pattern, is originally generated as // // bind(var_$sum, var_$a, var_$c, var_$s, (Adult $a, Child $c, String $s) -> $a.getAge() + $c.getAge() + $s.length()) final Expression bindingId = lastPattern.getArgument(0); bindExpression.findFirst(NameExpr.class, e -> e.equals(bindingId)).ifPresent( name -> { // since the bind has to be linked to $s, the corresponding variable should be removed from the arguments list so it becomes // bind(var_$sum, var_$a, var_$c, (Adult $a, Child $c, String $s) -> $a.getAge() + $c.getAge() + $s.length()) bindExpression.remove(name); // also the first formal parameter in the binding lambda has to be $s so it becomes // bind(var_$sum, var_$a, var_$c, (String $s, Adult $a, Child $c) -> $a.getAge() + $c.getAge() + $s.length()) LambdaExpr lambda = (LambdaExpr)bindExpression.getArgument( bindExpression.getArguments().size()-1 ); if (lambda.getParameters().size() > 1) { String formalArg = context.fromVar( name.getNameAsString() ); for (Parameter param : lambda.getParameters()) { if (param.getNameAsString().equals( formalArg )) { lambda.getParameters().remove( param ); lambda.getParameters().add( 0, param ); break; } } } } ); return bindExpression; } static class NewBinding { List<String> patternBinding; MethodCallExpr bindExpression; NewBinding(List<String> patternBinding, MethodCallExpr bindExpression) { this.patternBinding = patternBinding; this.bindExpression = bindExpression; } } private class AccumulateParsingFailedException extends RuntimeException { AccumulateParsingFailedException(String message) { super(message); } } private class AccumulateNonExistingFunction extends RuntimeException { private final AccumulateDescr.AccumulateFunctionCallDescr function; AccumulateNonExistingFunction(AccumulateDescr.AccumulateFunctionCallDescr function) { 
this.function = function; } } }
apache/incubator-retired-wave
38,012
wave/src/test/java/org/waveprotocol/wave/concurrencycontrol/channel/WaveletDeltaChannelImplTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.concurrencycontrol.channel; import junit.framework.TestCase; import org.waveprotocol.wave.common.logging.PrintLogger; import org.waveprotocol.wave.concurrencycontrol.common.ChannelException; import org.waveprotocol.wave.concurrencycontrol.common.Recoverable; import org.waveprotocol.wave.concurrencycontrol.common.ResponseCode; import org.waveprotocol.wave.model.id.WaveId; import org.waveprotocol.wave.model.id.WaveletId; import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta; import org.waveprotocol.wave.model.operation.wave.WaveletDelta; import org.waveprotocol.wave.model.testing.BasicFactories; import org.waveprotocol.wave.model.testing.DeltaTestUtil; import org.waveprotocol.wave.model.util.CollectionUtils; import org.waveprotocol.wave.model.version.HashedVersion; import org.waveprotocol.wave.model.wave.ParticipantId; import org.waveprotocol.wave.model.wave.data.ObservableWaveletData; import org.waveprotocol.wave.model.wave.data.impl.EmptyWaveletSnapshot; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Queue; /** * Tests the {@link WaveletDeltaChannelImpl} by mocking the back-end channel. 
* */ public class WaveletDeltaChannelImplTest extends TestCase { /** * A mock delta channel receiver which support setting expectations of method * calls. */ private static class MockReceiver implements WaveletDeltaChannel.Receiver { private static enum Method { CONNECT, ACK, COMMIT, DELTA, NACK } private final Queue<Object[]> expectations = new LinkedList<Object[]>(); public void expectConnection(HashedVersion connectVersion, HashedVersion currentVersion) { expectations.add(new Object[] {Method.CONNECT, connectVersion, currentVersion}); } public void expectAck(int opsApplied, HashedVersion version) { expectations.add(new Object[] {Method.ACK, opsApplied, version}); } public void expectCommit(long version) { expectations.add(new Object[] {Method.COMMIT, version}); } public void expectDelta(TransformedWaveletDelta delta) { expectations.add(new Object[] {Method.DELTA, delta}); } public void expectNack(String errorString, long version) { expectations.add(new Object[] {Method.NACK, errorString, version}); } public void checkExpectationsSatisfied() { assertTrue(expectations.isEmpty()); } @Override public void onConnection(HashedVersion connectVersion, HashedVersion currentVersion) { Object[] expected = expectations.remove(); assertEquals(expected[0], Method.CONNECT); assertEquals("incorrect connectVersion", expected[1], connectVersion); assertEquals("incorrect currentVersion", expected[2], currentVersion); } @Override public void onAck(int opsApplied, HashedVersion version) { Object[] expected = expectations.remove(); assertEquals(expected[0], Method.ACK); assertEquals("incorrect opsApplied", expected[1], opsApplied); assertEquals("incorrect version", expected[2], version); } @Override public void onCommit(long version) { Object[] expected = expectations.remove(); assertEquals(expected[0], Method.COMMIT); assertEquals("incorrect version", expected[1], version); } @Override public void onDelta(TransformedWaveletDelta delta) { Object[] expected = expectations.remove(); 
assertEquals(expected[0], Method.DELTA); assertEquals("incorrect delta", expected[1], delta); } @Override public void onNack(ResponseCode responseCode, String errorString, long version) { Object[] expected = expectations.remove(); assertTrue(responseCode != ResponseCode.OK); assertEquals(expected[0], Method.NACK); assertEquals("incorrect error string", expected[1], errorString); assertEquals("incorrect version", expected[2], version); } } /** * A mock wavelet channel which allows setting expectations for method calls. */ private static class MockWaveletChannel implements WaveletChannel { private final Queue<WaveletDelta> expectations = new LinkedList<WaveletDelta>(); private final Queue<SubmitCallback> awaitingAck = new LinkedList<SubmitCallback>(); public void expectSubmit(WaveletDelta delta1) { expectations.add(delta1); } public void checkExpectationsSatisfied() { assertTrue(expectations.isEmpty()); } @Override public void submit(WaveletDelta delta, SubmitCallback callback) { WaveletDelta e = expectations.remove(); assertEquals(e, delta); awaitingAck.add(callback); } public void ackSubmit(int opsApplied, long version, byte[] signature) throws ChannelException { SubmitCallback nextCallback = awaitingAck.remove(); nextCallback.onSuccess(opsApplied, HashedVersion.of(version, signature), ResponseCode.OK, null); } public void nackSubmit(String reason, ResponseCode error, long version, byte[] signature) throws ChannelException { SubmitCallback nextCallback = awaitingAck.remove(); nextCallback.onSuccess(0, HashedVersion.of(version, signature), error, reason); } public void failSubmit(String reason) throws ChannelException { SubmitCallback nextCallback = awaitingAck.remove(); nextCallback.onFailure(reason); } @Override public String debugGetProfilingInfo() { return null; } } private static final ParticipantId USER_ID = ParticipantId.ofUnsafe("test@example.com"); private static final DeltaTestUtil UTIL = new DeltaTestUtil(USER_ID); private final static WaveId WAVE_ID = 
WaveId.of("example.com", "waveid"); private final static WaveletId WAVELET_ID = WaveletId.of("example.com", "waveletid"); private static final ObservableWaveletData.Factory<?> DATA_FACTORY = BasicFactories.waveletDataImplFactory(); private MockWaveletChannel waveletChannel; private MockReceiver receiver; private final PrintLogger logger = new PrintLogger(); private WaveletDeltaChannelImpl deltaChannel; // channel under test @Override public void setUp() { waveletChannel = new MockWaveletChannel(); receiver = new MockReceiver(); deltaChannel = new WaveletDeltaChannelImpl(waveletChannel, logger); deltaChannel.reset(receiver); } /** * Tests that resetting a new channel does nothing. */ public void testResetNewChannel() { deltaChannel.reset(null); waveletChannel.checkExpectationsSatisfied(); } /** * Tests that sending on an unconnected channel fails. */ public void testSendBeforeConnectFails() { try { sendDelta(buildDelta(1, 1)); fail("Expected IllegalStateException"); } catch (IllegalStateException expected) { } waveletChannel.checkExpectationsSatisfied(); } /** * Tests that a channel with no receivers quietly drops messages. */ public void testChannelWithNoReceiverDropsMessages() throws ChannelException { final long initialVersion = 57; final byte[] signature = sig(1); final ObservableWaveletData wavelet = buildSnapshot(initialVersion, signature); final TransformedWaveletDelta delta = buildServerDelta(initialVersion, 3); deltaChannel.reset(null); // Clear receiver. connectChannel(wavelet); receiveUpdateOnConnectedChannel(delta, initialVersion, signature); receiver.checkExpectationsSatisfied(); } /** * Tests that a last committed version sent with the connect message is * delivered. 
*/ public void testConnectReceivesCommittedVersion() throws ChannelException { final long currentVersion = 57; final byte[] signature = sig(1); final long committedVersion = 50; final byte[] committedSignature = sig(2); final HashedVersion signedVersion = HashedVersion.of(currentVersion, signature); final ObservableWaveletData wavelet = buildSnapshot(currentVersion, signature); receiver.expectConnection(signedVersion, signedVersion); receiver.expectCommit(committedVersion); connectChannel(wavelet, committedVersion, committedSignature); receiver.checkExpectationsSatisfied(); } /** * Tests that the last committed version sent with a reconnect initial delta is * delivered. */ public void testReconnectReceivesCommittedVersion() throws ChannelException { final long currentVersion = 57; final byte[] signature = sig(1); final long committedVersion = 50; final byte[] committedSignature = sig(2); final HashedVersion signedVersion = HashedVersion.of(currentVersion, signature); final List<TransformedWaveletDelta> reconnect = buildReconnect(currentVersion, signature); // Expect connection. receiver.expectConnection(signedVersion, signedVersion); receiver.expectCommit(committedVersion); reconnectChannel(reconnect, committedVersion, committedSignature); receiver.checkExpectationsSatisfied(); } /** * Tests that when the client terminates the channel the receiver * is not notified. */ public void testClientResetTerminatesSilently() throws ChannelException { checkedConnectChannel(57); deltaChannel.reset(null); receiver.checkExpectationsSatisfied(); } /** * Tests that a delta sent down the channel is received and the acknowledgment * delivered. 
*/ public void testSubmitDelta() throws ChannelException { final long currentVersion = 57; final int ops1 = 7; final WaveletDelta delta1 = buildDelta(currentVersion, ops1); final byte[] signature1 = sig(1); final WaveletDelta delta2 = buildDelta(currentVersion + ops1, 2); final int opsCommitted = 3; final String errorMsg2 = "SERVER_ERROR"; checkedConnectChannel(currentVersion); // Send delta1. submitDeltaOnConnectedChannel(delta1); // All ops are acked. receiver.expectAck(ops1, HashedVersion.of(currentVersion + ops1, signature1)); ackDeltaOnConnectedChannel(currentVersion + ops1, ops1, signature1); // Send delta2. submitDeltaOnConnectedChannel(delta2); // Nack with a randomly injected error, as if something just went wrong // server-side. receiver.expectNack(errorMsg2, currentVersion + ops1); nackDeltaOnConnectedChannel(currentVersion + ops1, signature1, errorMsg2, ResponseCode.INTERNAL_ERROR); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a delta submitted after a reset has sequence number 1. */ public void testSubmitAfterResetRestartsSequence() throws ChannelException { final long currentVersion = 57; final int ops1 = 7; final WaveletDelta delta1 = buildDelta(currentVersion, ops1); final byte[] signature1 = sig(1); final WaveletDelta delta2 = buildDelta(currentVersion + ops1, 2); checkedConnectChannel(currentVersion); // Send delta1. submitDeltaOnConnectedChannel(delta1); // All ops are acked. receiver.expectAck(ops1, HashedVersion.of(currentVersion + ops1, signature1)); ackDeltaOnConnectedChannel(currentVersion + ops1, ops1, signature1); deltaChannel.reset(receiver); checkedReconnectChannel(buildReconnect(currentVersion + ops1, signature1), currentVersion + ops1, signature1); // Send delta2. submitDeltaOnConnectedChannel(delta2); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a delta (with commit version) received from the server is * delivered. 
*/ public void testReceiveDelta() throws ChannelException { final long initialVersion = 57; final byte[] commitSig = sig(1); final TransformedWaveletDelta delta = buildServerDelta(initialVersion, 7); checkedConnectChannel(initialVersion); // Receive and deliver delta. receiver.expectCommit(initialVersion); receiver.expectDelta(delta); receiveUpdateOnConnectedChannel(delta, initialVersion, commitSig); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a last-committed-version message with no deltas is correctly * delivered. */ public void testReceiveLastCommittedVersion() throws ChannelException { final long initialVersion = 57; final long committedVersion = 50; final byte[] committedSignature = sig(1); checkedConnectChannel(initialVersion); receiver.expectCommit(committedVersion); receiveUpdateOnConnectedChannel(null, committedVersion, committedSignature); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a submit delta interleaves properly with received deltas when the ACK is * received in sequence with server deltas. */ public void testSynchronizedAckDelta() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Receive server delta. final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. final long versionAfterServer1 = initialVersion + serverOps1; final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Acknowledge all ops immediately. 
final long versionAfterClient = versionAfterServer1 + clientOps; final byte[] ackedSignature = sig(1); receiver.expectAck(clientOps, HashedVersion.of(versionAfterClient, ackedSignature)); ackDeltaOnConnectedChannel(versionAfterClient, clientOps, ackedSignature); // Receive a second server delta. final int serverOps2 = 3; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient, serverOps2); receiver.expectDelta(delta2); receiveUpdateOnConnectedChannel(delta2); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a submit delta interleaves properly with received deltas when the ACK is * received after a subsequent server delta. */ public void testLateAckDelta() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Receive server delta. final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. final long versionAfterServer1 = initialVersion + serverOps1; final byte[] sigAfterServer1 = sig(1); final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Receive a second server delta, logically after the client ops. final int serverOps2 = 3; final long versionAfterClient = versionAfterServer1 + clientOps; final byte[] sigAfterClient = sig(2); final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient, serverOps2); // Don't expect the delta yet. receiveUpdateOnConnectedChannel(delta2); // Receive commit message for a received delta while there are queued // messages; the message jumps the queue to be delivered immediately. receiver.expectCommit(versionAfterServer1); receiveUpdateOnConnectedChannel(null, versionAfterServer1, sigAfterServer1); // Receive a commit message for the outstanding submit delta. 
This // message is delayed until the ack. receiveUpdateOnConnectedChannel(null, versionAfterClient, sigAfterClient); // Receive the ack and expect the pending delta and commit (in the order received). final byte[] ackedSignature = sig(1); receiver.expectAck(clientOps, HashedVersion.of(versionAfterClient, ackedSignature)); receiver.expectCommit(versionAfterClient); receiver.expectDelta(delta2); ackDeltaOnConnectedChannel(versionAfterClient, clientOps, ackedSignature); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a submit delta interleaves properly with received deltas when the ACK is * received before a preceding server delta. */ public void testEarlyAckDelta() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Submit delta. final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(initialVersion, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Acknowledge the submitted delta against a future version. final long versionAfterServer1 = initialVersion + serverOps1; final long versionAfterClient = versionAfterServer1 + clientOps; final byte[] ackedSignature = sig(1); // Don't expect ack yet. ackDeltaOnConnectedChannel(versionAfterClient, clientOps, ackedSignature); // Receive the server delta and the expect pending ack. final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiver.expectAck(clientOps, HashedVersion.of(versionAfterClient, ackedSignature)); receiveUpdateOnConnectedChannel(delta1); // Receive a second server delta, logically after the client ops. final int serverOps2 = 3; final long versionAfterServer2 = versionAfterClient + serverOps2; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient, serverOps2); receiver.expectDelta(delta2); receiveUpdateOnConnectedChannel(delta2); // Close. 
closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that the channel can handle receiving an ack for fewer ops than * were submitted, as happens when the server transforms some away. * * Only the synchronized case is tested. */ public void testShrunkAckDelta() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Receive server delta. final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. final long versionAfterServer1 = initialVersion + serverOps1; final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Acknowledge immediately. final int ackedOps = 0; final long versionAfterClient = versionAfterServer1 + ackedOps; final byte[] ackedSignature = sig(1); receiver.expectAck(ackedOps, HashedVersion.of(versionAfterClient, ackedSignature)); ackDeltaOnConnectedChannel(versionAfterClient, ackedOps, ackedSignature); // Receive a second server delta. final int serverOps2 = 3; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient, serverOps2); receiver.expectDelta(delta2); receiveUpdateOnConnectedChannel(delta2); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that a submit delta interleaves properly with received deltas when ops are NACKed * before the next server delta. */ public void testSynchronizedNackDelta() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); checkedConnectChannel(initialVersion); // Receive server delta. receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. 
final long versionAfterServer1 = initialVersion + serverOps1; final byte[] sigAfterServer1 = sig(1); final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Nack delta immediately. final String error = "error"; receiver.expectNack(error, versionAfterServer1); nackDeltaOnConnectedChannel(versionAfterServer1, sigAfterServer1, error, ResponseCode.BAD_REQUEST); // Receive a second server delta. final int serverOps2 = 3; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterServer1, serverOps2); receiver.expectDelta(delta2); receiveUpdateOnConnectedChannel(delta2); // Try another submit. final long versionAfterServer2 = versionAfterServer1 + serverOps2; final WaveletDelta clientDelta2 = buildDelta(versionAfterServer2, clientOps); submitDeltaOnConnectedChannel(clientDelta2); // Ack. final long versionAfterClient2 = versionAfterServer2 + clientOps; final byte[] ackedSignature2 = sig(2); receiver.expectAck(clientOps, HashedVersion.of(versionAfterClient2, ackedSignature2)); ackDeltaOnConnectedChannel(versionAfterClient2, clientOps, ackedSignature2); // Close. closeChannel(); receiver.checkExpectationsSatisfied(); } /** * Tests that receiving an acknowledged delta submitted by this client is * detected as a server-side error. */ public void testReflectedSubmittedDeltaAfterAckIsError() throws ChannelException { final long currentVersion = 57; final int ops1 = 7; final byte[] signature1 = sig(1); final WaveletDelta delta1 = buildDelta(currentVersion, ops1); checkedConnectChannel(currentVersion); // Send delta1. submitDeltaOnConnectedChannel(delta1); // All ops are acked. receiver.expectAck(ops1, HashedVersion.of(currentVersion + ops1, signature1)); ackDeltaOnConnectedChannel(currentVersion + ops1, ops1, signature1); // Receive the delta (erroneously). Expect termination to be reported. 
try { receiveUpdateOnConnectedChannel(buildServerDelta(currentVersion, ops1)); fail("ChannelException expected"); } catch (ChannelException expected) { } receiver.checkExpectationsSatisfied(); } /** * Tests that receiving an ack for a delta already received by this client * is detected as a server-side error. */ public void testAckForReceivedDeltaIsError() throws ChannelException { final long currentVersion = 57; final int ops1 = 7; final byte[] signature1 = sig(1); final WaveletDelta delta1 = buildDelta(currentVersion, ops1); checkedConnectChannel(currentVersion); // Send delta1. submitDeltaOnConnectedChannel(delta1); // Receive the delta (erroneously), but we can't detect it's an error yet. TransformedWaveletDelta serverDelta1 = buildServerDelta(currentVersion, ops1); receiver.expectDelta(serverDelta1); receiveUpdateOnConnectedChannel(serverDelta1); // Now receive the ack for the delta. Expect failure. try { ackDeltaOnConnectedChannel(currentVersion + ops1, ops1, signature1); fail("ChannelException expected"); } catch (ChannelException expected) { } receiver.checkExpectationsSatisfied(); } /** * Tests that an ack received for a delta submitted before a channel * reconnects is dropped. */ public void testAckAfterReconnectIgnored() throws ChannelException { final long initialVersion = 57; final byte[] initialSignature = sig(4); checkedConnectChannel(initialVersion); // Submit delta. final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(initialVersion, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Reset channel. deltaChannel.reset(receiver); final List<TransformedWaveletDelta> reconnect = buildReconnect(initialVersion, initialSignature); checkedReconnectChannel(reconnect, 0, new byte[0]); // Acknowledge outstanding submit. final long versionAfterClient = initialVersion + clientOps; final byte[] ackedSignature = sig(1); // Don't expect the ack at the receiver. 
ackDeltaOnConnectedChannel(versionAfterClient, clientOps, ackedSignature); receiver.checkExpectationsSatisfied(); } public void testNackTooOldIsRecoverable() throws ChannelException { final long initialVersion = 0; checkedConnectChannel(initialVersion); // Submit delta. final long submitVersion = 0; final byte[] signature = sig(1); final int clientOps = 1; final WaveletDelta clientDelta = buildDelta(submitVersion, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Nack delta immediately with TOO_OLD. try { nackDeltaOnConnectedChannel(submitVersion, signature, "too old", ResponseCode.TOO_OLD); fail("Expected an exception"); } catch (ChannelException e) { assertEquals(Recoverable.RECOVERABLE, e.getRecoverable()); } } /** * Tests that the delta channel detects a gap in the op stream and * throws an exception. */ public void testMissingDeltaKillsChannel() throws ChannelException { final long initialVersion = 57; final byte[] signature = sig(1); final int ops = 7; final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, ops); checkedConnectChannel(initialVersion); // Receive and deliver delta. receiver.expectCommit(initialVersion); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1, initialVersion, signature); // Receive delta with a version number too high. final TransformedWaveletDelta delta2 = buildServerDelta(initialVersion + ops + 1, 1); try { receiveUpdateOnConnectedChannel(delta2, initialVersion, signature); fail("Expected a ChannelException"); } catch (ChannelException expected) { } receiver.checkExpectationsSatisfied(); } /** * Tests that the delta channel detects a gap in the op stream even when * there is an outstanding submission. */ public void testMissingDeltaWithLateAckKillsChannel() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Receive server delta. 
final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. final long versionAfterServer1 = initialVersion + serverOps1; final int sigAfterServer1 = 0x11111111; final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Receive a second server delta that's after our submission, // will be queued. final long versionAfterClient = versionAfterServer1 + clientOps; final int sigAfterClient = 0x22222222; final int serverOps2 = 3; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient, serverOps2); receiveUpdateOnConnectedChannel(delta2); // Receive a third server delta that's skipped a version. final long versionAfterServer2 = versionAfterClient + serverOps2; final int sigAfterServer2 = 0x44444444; final TransformedWaveletDelta delta3 = buildServerDelta(versionAfterServer2 + 1, 1); try { receiveUpdateOnConnectedChannel(delta3); fail("Expected a ChannelException"); } catch (ChannelException expected) { } receiver.checkExpectationsSatisfied(); } /** * Tests that the delta channel detects a gap in the op stream even when there * is an oustanding delta submission if the ack couldn't possibly account for * the version gap. */ public void testMissingDeltaBeyondLateAckKillsChannel() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Receive server delta. final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. 
final long versionAfterServer1 = initialVersion + serverOps1; final int sigAfterServer1 = 0x11111111; final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Receive a second server delta, too far ahead of the client ops. final long versionAfterClient = versionAfterServer1 + clientOps; final int sigAfterClient = 0x22222222; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient + 1, 1); try { receiveUpdateOnConnectedChannel(delta2); fail("Expected a ChannelException"); } catch (ChannelException expected) { } receiver.checkExpectationsSatisfied(); } /** * Tests that the delta channel detects a gap in the op stream when a received * ack doesn't account for the version gap. */ public void testMissingDeltaBeyondShortAckKillsChannel() throws ChannelException { final long initialVersion = 57; final int serverOps1 = 7; checkedConnectChannel(initialVersion); // Receive server delta. final TransformedWaveletDelta delta1 = buildServerDelta(initialVersion, serverOps1); receiver.expectDelta(delta1); receiveUpdateOnConnectedChannel(delta1); // Submit delta. final long versionAfterServer1 = initialVersion + serverOps1; final int sigAfterServer1 = 0x11111111; final int clientOps = 5; final WaveletDelta clientDelta = buildDelta(versionAfterServer1, clientOps); submitDeltaOnConnectedChannel(clientDelta); // Receive a second server delta that initially looks ok. final long versionAfterClient = versionAfterServer1 + clientOps; final int sigAfterClient = 0x22222222; final TransformedWaveletDelta delta2 = buildServerDelta(versionAfterClient, 1); receiveUpdateOnConnectedChannel(delta2); // Receive ack for fewer than the number of ops than we // sent (some were transformed away). 
final byte[] ackedSignature = sig(3); receiver.expectAck(clientOps - 1, HashedVersion.of(versionAfterClient - 1, ackedSignature)); try { ackDeltaOnConnectedChannel(versionAfterClient - 1, clientOps - 1, ackedSignature); fail("Expected a ChannelException"); } catch (ChannelException expected) { } receiver.checkExpectationsSatisfied(); } // --- Helper methods --- // /** * Connects the channel by sending an initial message with a wavelet snapshot. * The committed version is synthesized to match the snapshot version. * * @param wave initial wavelet snapshot */ private void connectChannel(ObservableWaveletData wave) throws ChannelException { connectChannel(wave, wave.getVersion(), wave.getHashedVersion().getHistoryHash()); } /** * Connects the channel by sending an initial message with a wavelet snapshot. * The message includes a committed version. * * @param snapshot initial wavelet snapshot * @param committed last committed version at connection (or -1) * @param commitSignature last committed signature at connection */ private void connectChannel(ObservableWaveletData snapshot, long committed, byte[] commitSignature) throws ChannelException { HashedVersion commitVersion = HashedVersion.of(committed, commitSignature); deltaChannel.onWaveletSnapshot(snapshot, commitVersion, null); } /** * Reconnects the channel at the current version by sending an initial message with a * version and signature. * * @param reconnect reconnection message * @param committed last committed version at reconnection */ private void reconnectChannel(List<TransformedWaveletDelta> reconnect, long committed, byte[] commitSignature) throws ChannelException { HashedVersion commitVersion = HashedVersion.of(committed, commitSignature); deltaChannel.onWaveletUpdate(reconnect, commitVersion, null); } /** * Connects the channel and checks expectations. * * @param currentVersion version at which to connect. 
*/ private void checkedConnectChannel(long currentVersion) throws ChannelException { byte[] signature = sig(currentVersion); final HashedVersion signedVersion = HashedVersion.of(currentVersion, sig(currentVersion)); final ObservableWaveletData wavelet = buildSnapshot(currentVersion, signature); receiver.expectConnection(signedVersion, signedVersion); receiver.expectCommit(currentVersion); connectChannel(wavelet); receiver.checkExpectationsSatisfied(); } /** * Reconnects the channel and checks expectations. * * @param list reconnection message * @param committed committed version * @param commitSignature committed signature */ private void checkedReconnectChannel(List<TransformedWaveletDelta> list, long committed, byte[] commitSignature) throws ChannelException { HashedVersion signedVersion = HashedVersion.of( list.get(0).getAppliedAtVersion(), list.get(0).getResultingVersion().getHistoryHash()); receiver.expectConnection(signedVersion, signedVersion); receiver.expectCommit(committed); reconnectChannel(list, committed, commitSignature); } /** * Simulates channel termination. */ private void closeChannel() { deltaChannel.reset(null); } /** Tracks state for sendDeltaOnConnectedChannel. */ private String sendState = "uninitialized"; /** * Sends a delta on a connected channel. * @param delta1 the delta to send */ private void submitDeltaOnConnectedChannel(final WaveletDelta delta1) { waveletChannel.expectSubmit(delta1); sendState = "initial"; sendDelta(delta1); assertEquals("sending", sendState); } /** * Sends a delta on a channel. The sendState is set to "sending" when * the channel takes the delta. */ private void sendDelta(final WaveletDelta delta) { deltaChannel.send(new WaveletDeltaChannel.Transmitter() { public ClientMessage takeMessage() { assertEquals("initial", sendState); sendState = "sending"; return new ClientMessage(delta, false); } }); } /** * Acknowledges a delta on a connected channel. 
* * @param ackedVersion version to acknowledge * @param opsApplied number of ops applied * @param signature acknowledged signature */ private void ackDeltaOnConnectedChannel(long ackedVersion, int opsApplied, byte[] signature) throws ChannelException { waveletChannel.ackSubmit(opsApplied, ackedVersion, signature); } /** * Negatively acknowledges a delta on a connected channel (possibly acknowledging some ops). */ private void nackDeltaOnConnectedChannel(long ackedVersion, byte[] signature, String reason, ResponseCode error) throws ChannelException { waveletChannel.nackSubmit(reason, error, ackedVersion, signature); } /** * Receives a delta message on a connected channel. * * @param delta1 delta to receive, or null for no delta */ private void receiveUpdateOnConnectedChannel(TransformedWaveletDelta delta1) throws ChannelException { receiveUpdateOnConnectedChannel(delta1, -1, new byte[0]); } /** * Receives a delta message on a connected channel. * * @param delta delta to receive, or null for no delta * @param committed last committed version, or -1 to omit */ private void receiveUpdateOnConnectedChannel(TransformedWaveletDelta delta, long committed, byte[] commitSignature) throws ChannelException { HashedVersion commitVersion = null; List<TransformedWaveletDelta> deltas = CollectionUtils.newArrayList(); if (delta != null) { deltas.add(delta); } if (committed != -1) { commitVersion = HashedVersion.of(committed, commitSignature); } deltaChannel.onWaveletUpdate(deltas, commitVersion, null); } /** * Builds a minimal snapshot message. * * @param version wavelet version * @param signature wavelet signature */ private ObservableWaveletData buildSnapshot(final long version, final byte[] signature) { HashedVersion hv = HashedVersion.of(version, signature); return DATA_FACTORY.create( new EmptyWaveletSnapshot(WAVE_ID, WAVELET_ID, new ParticipantId("creator@gwave.com"), hv, 0L)); } /** * Builds a minimal reconnection initial delta. 
* * @param version reconnect version * @param signature reconnect signature */ private List<TransformedWaveletDelta> buildReconnect(long version, byte[] signature) { return Collections.singletonList(UTIL.makeTransformedDelta(0L, HashedVersion.of(version, signature), 0)); } /** Builds a client delta with numOps ops. */ private WaveletDelta buildDelta(long targetVersion, int numOps) { return UTIL.makeDelta(HashedVersion.unsigned(targetVersion), 123457890L, numOps); } /** Builds a server delta with numOps ops. */ private TransformedWaveletDelta buildServerDelta(long initialVersion, int numOps) { return UTIL.makeTransformedDelta(1234567890L, HashedVersion.unsigned(initialVersion + numOps), numOps); } private static byte[] sig(long v) { return new byte[] {( byte)v, (byte)v, (byte)v, (byte)v }; } }
apache/gravitino
37,943
server/src/main/java/org/apache/gravitino/server/web/rest/ExceptionHandlers.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.gravitino.server.web.rest; import com.google.common.annotations.VisibleForTesting; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.gravitino.dto.responses.ErrorResponse; import org.apache.gravitino.exceptions.AlreadyExistsException; import org.apache.gravitino.exceptions.CatalogAlreadyExistsException; import org.apache.gravitino.exceptions.ConnectionFailedException; import org.apache.gravitino.exceptions.FilesetAlreadyExistsException; import org.apache.gravitino.exceptions.ForbiddenException; import org.apache.gravitino.exceptions.GroupAlreadyExistsException; import org.apache.gravitino.exceptions.InUseException; import org.apache.gravitino.exceptions.JobTemplateAlreadyExistsException; import org.apache.gravitino.exceptions.MetalakeAlreadyExistsException; import org.apache.gravitino.exceptions.MetalakeInUseException; import org.apache.gravitino.exceptions.MetalakeNotInUseException; import org.apache.gravitino.exceptions.ModelAlreadyExistsException; import org.apache.gravitino.exceptions.ModelVersionAliasesAlreadyExistException; import org.apache.gravitino.exceptions.NoSuchMetalakeException; import 
org.apache.gravitino.exceptions.NonEmptyCatalogException; import org.apache.gravitino.exceptions.NonEmptyMetalakeException; import org.apache.gravitino.exceptions.NonEmptySchemaException; import org.apache.gravitino.exceptions.NotFoundException; import org.apache.gravitino.exceptions.NotInUseException; import org.apache.gravitino.exceptions.PartitionAlreadyExistsException; import org.apache.gravitino.exceptions.PolicyAlreadyAssociatedException; import org.apache.gravitino.exceptions.PolicyAlreadyExistsException; import org.apache.gravitino.exceptions.RoleAlreadyExistsException; import org.apache.gravitino.exceptions.SchemaAlreadyExistsException; import org.apache.gravitino.exceptions.TableAlreadyExistsException; import org.apache.gravitino.exceptions.TagAlreadyAssociatedException; import org.apache.gravitino.exceptions.TagAlreadyExistsException; import org.apache.gravitino.exceptions.TopicAlreadyExistsException; import org.apache.gravitino.exceptions.UserAlreadyExistsException; import org.apache.gravitino.server.web.Utils; import org.eclipse.jetty.util.StringUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ExceptionHandlers { private static final Logger LOG = LoggerFactory.getLogger(ExceptionHandlers.class); private ExceptionHandlers() {} public static Response handlePartitionException( OperationType op, String partition, String table, Exception e) { return PartitionExceptionHandler.INSTANCE.handle(op, partition, table, e); } public static Response handleTableException( OperationType op, String table, String schema, Exception e) { return TableExceptionHandler.INSTANCE.handle(op, table, schema, e); } public static Response handleSchemaException( OperationType op, String schema, String catalog, Exception e) { return SchemaExceptionHandler.INSTANCE.handle(op, schema, catalog, e); } public static Response handleCatalogException( OperationType op, String catalog, String metalake, Exception e) { return 
CatalogExceptionHandler.INSTANCE.handle(op, catalog, metalake, e); } public static Response handleMetalakeException(OperationType op, String metalake, Exception e) { return MetalakeExceptionHandler.INSTANCE.handle(op, metalake, "", e); } public static Response handleFilesetException( OperationType op, String fileset, String schema, Exception e) { return FilesetExceptionHandler.INSTANCE.handle(op, fileset, schema, e); } public static Response handleUserException( OperationType op, String user, String metalake, Exception e) { return UserExceptionHandler.INSTANCE.handle(op, user, metalake, e); } public static Response handleGroupException( OperationType op, String group, String metalake, Exception e) { return GroupExceptionHandler.INSTANCE.handle(op, group, metalake, e); } public static Response handleRoleException( OperationType op, String role, String metalake, Exception e) { return RoleExceptionHandler.INSTANCE.handle(op, role, metalake, e); } public static Response handleTopicException( OperationType op, String topic, String schema, Exception e) { return TopicExceptionHandler.INSTANCE.handle(op, topic, schema, e); } public static Response handleUserPermissionOperationException( OperationType op, String roles, String parent, Exception e) { return UserPermissionOperationExceptionHandler.INSTANCE.handle(op, roles, parent, e); } public static Response handleGroupPermissionOperationException( OperationType op, String roles, String parent, Exception e) { return GroupPermissionOperationExceptionHandler.INSTANCE.handle(op, roles, parent, e); } public static Response handleTagException( OperationType op, String tag, String parent, Exception e) { return TagExceptionHandler.INSTANCE.handle(op, tag, parent, e); } public static Response handlePolicyException( OperationType op, String policy, String parent, Exception e) { return PolicyExceptionHandler.INSTANCE.handle(op, policy, parent, e); } public static Response handleCredentialException( OperationType op, String 
metadataObjectName, Exception e) { return CredentialExceptionHandler.INSTANCE.handle(op, metadataObjectName, "", e); } public static Response handleModelException( OperationType op, String model, String schema, Exception e) { return ModelExceptionHandler.INSTANCE.handle(op, model, schema, e); } public static Response handleJobTemplateException( OperationType op, String jobTemplate, String metalake, Exception e) { return JobTemplateExceptionHandler.INSTANCE.handle(op, jobTemplate, metalake, e); } public static Response handleJobException( OperationType op, String job, String metalake, Exception e) { return JobExceptionHandler.INSTANCE.handle(op, job, metalake, e); } public static Response handleTestConnectionException(Exception e) { ErrorResponse response; if (e instanceof IllegalArgumentException) { response = ErrorResponse.illegalArguments(e.getMessage(), e); } else if (e instanceof ConnectionFailedException) { response = ErrorResponse.connectionFailed(e.getMessage(), e); } else if (e instanceof NotFoundException) { response = ErrorResponse.notFound(e.getClass().getSimpleName(), e.getMessage(), e); } else if (e instanceof AlreadyExistsException) { response = ErrorResponse.alreadyExists(e.getClass().getSimpleName(), e.getMessage(), e); } else if (e instanceof NotInUseException) { response = ErrorResponse.notInUse(e.getClass().getSimpleName(), e.getMessage(), e); } else { return Utils.internalError(e.getMessage(), e); } return Response.status(Response.Status.OK) .entity(response) .type(MediaType.APPLICATION_JSON) .build(); } public static Response handleOwnerException( OperationType type, String name, String metalake, Exception e) { return OwnerExceptionHandler.INSTANCE.handle(type, name, metalake, e); } public static Response handleRolePermissionOperationException( OperationType type, String name, String parent, Exception e) { return RolePermissionOperationHandler.INSTANCE.handle(type, name, parent, e); } public static Response handleStatisticException( 
OperationType type, String name, String parent, Exception e) { return StatisticExceptionHandler.INSTANCE.handle(type, name, parent, e); } public static Response handlePartitionStatsException( OperationType type, String name, String parent, Exception e) { return PartitionStatsExceptionHandler.INSTANCE.handle(type, name, parent, e); } private static class PartitionExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new PartitionExceptionHandler(); private static String getPartitionErrorMsg( String partition, String operation, String table, String reason) { return String.format( "Failed to operate partition(s)%s operation [%s] of table [%s], reason [%s]", partition, operation, table, reason); } @Override public Response handle(OperationType op, String partition, String table, Exception e) { String formatted = StringUtil.isBlank(partition) ? "" : " [" + partition + "]"; String errorMsg = getPartitionErrorMsg(formatted, op.name(), table, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof PartitionAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof UnsupportedOperationException) { return Utils.unsupportedOperation(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, partition, table, e); } } } private static class TableExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new TableExceptionHandler(); private static String getTableErrorMsg( String table, String operation, String schema, String reason) { return String.format( "Failed to operate table(s)%s operation [%s] under schema [%s], reason [%s]", table, operation, schema, reason); } @Override public Response handle(OperationType op, String 
table, String schema, Exception e) { String formatted = StringUtil.isBlank(table) ? "" : " [" + table + "]"; String errorMsg = getTableErrorMsg(formatted, op.name(), schema, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof TableAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof UnsupportedOperationException) { return Utils.unsupportedOperation(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, table, schema, e); } } } private static class SchemaExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new SchemaExceptionHandler(); private static String getSchemaErrorMsg( String schema, String operation, String catalog, String reason) { return String.format( "Failed to operate schema(s)%s operation [%s] under catalog [%s], reason [%s]", schema, operation, catalog, reason); } @Override public Response handle(OperationType op, String schema, String catalog, Exception e) { String formatted = StringUtil.isBlank(schema) ? 
"" : " [" + schema + "]"; String errorMsg = getSchemaErrorMsg(formatted, op.name(), catalog, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof SchemaAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NonEmptySchemaException) { return Utils.nonEmpty(errorMsg, e); } else if (e instanceof UnsupportedOperationException) { return Utils.unsupportedOperation(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, schema, catalog, e); } } } private static class CatalogExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new CatalogExceptionHandler(); private static String getCatalogErrorMsg( String catalog, String operation, String metalake, String reason) { return String.format( "Failed to operate catalog(s)%s operation [%s] under metalake [%s], reason [%s]", catalog, operation, metalake, reason); } @Override public Response handle(OperationType op, String catalog, String metalake, Exception e) { String formatted = StringUtil.isBlank(catalog) ? 
"" : " [" + catalog + "]"; String errorMsg = getCatalogErrorMsg(formatted, op.name(), metalake, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof ConnectionFailedException) { return Utils.connectionFailed(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof CatalogAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else if (e instanceof InUseException) { return Utils.inUse(errorMsg, e); } else if (e instanceof NonEmptyCatalogException) { return Utils.nonEmpty(errorMsg, e); } else { return super.handle(op, catalog, metalake, e); } } } private static class MetalakeExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new MetalakeExceptionHandler(); private static String getMetalakeErrorMsg(String metalake, String operation, String reason) { return String.format( "Failed to operate metalake(s)%s operation [%s], reason [%s]", metalake, operation, reason); } @Override public Response handle(OperationType op, String metalake, String parent, Exception e) { String formatted = StringUtil.isBlank(metalake) ? 
"" : " [" + metalake + "]"; String errorMsg = getMetalakeErrorMsg(formatted, op.name(), getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof MetalakeAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NoSuchMetalakeException) { return Utils.notFound(errorMsg, e); } else if (e instanceof MetalakeNotInUseException) { return Utils.notInUse(errorMsg, e); } else if (e instanceof MetalakeInUseException) { return Utils.inUse(errorMsg, e); } else if (e instanceof NonEmptyMetalakeException) { return Utils.nonEmpty(errorMsg, e); } else { return super.handle(op, metalake, parent, e); } } } private static class FilesetExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new FilesetExceptionHandler(); private static String getFilesetErrorMsg( String fileset, String operation, String schema, String reason) { return String.format( "Failed to operate fileset(s)%s operation [%s] under schema [%s], reason [%s]", fileset, operation, schema, reason); } @Override public Response handle(OperationType op, String fileset, String schema, Exception e) { String formatted = StringUtil.isBlank(fileset) ? 
"" : " [" + fileset + "]"; String errorMsg = getFilesetErrorMsg(formatted, op.name(), schema, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof FilesetAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, fileset, schema, e); } } } private static class UserExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new UserExceptionHandler(); private static String getUserErrorMsg( String user, String operation, String metalake, String reason) { if (metalake == null) { return String.format( "Failed to operate metalake admin user %s operation [%s], reason [%s]", user, operation, reason); } else { return String.format( "Failed to operate user %s operation [%s] under metalake [%s], reason [%s]", user, operation, metalake, reason); } } @Override public Response handle(OperationType op, String user, String metalake, Exception e) { String formatted = StringUtil.isBlank(user) ? 
"" : " [" + user + "]"; String errorMsg = getUserErrorMsg(formatted, op.name(), metalake, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof UserAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, user, metalake, e); } } } private static class GroupExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new GroupExceptionHandler(); private static String getGroupErrorMsg( String group, String operation, String metalake, String reason) { return String.format( "Failed to operate group %s operation [%s] under metalake [%s], reason [%s]", group, operation, metalake, reason); } @Override public Response handle(OperationType op, String group, String metalake, Exception e) { String formatted = StringUtil.isBlank(group) ? 
"" : " [" + group + "]"; String errorMsg = getGroupErrorMsg(formatted, op.name(), metalake, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof GroupAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, group, metalake, e); } } } private static class RoleExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new RoleExceptionHandler(); private static String getRoleErrorMsg( String role, String operation, String metalake, String reason) { return String.format( "Failed to operate role %s operation [%s] under metalake [%s], reason [%s]", role, operation, metalake, reason); } @Override public Response handle(OperationType op, String role, String metalake, Exception e) { String formatted = StringUtil.isBlank(role) ? 
"" : " [" + role + "]"; String errorMsg = getRoleErrorMsg(formatted, op.name(), metalake, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof RoleAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, role, metalake, e); } } } private static class TopicExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new TopicExceptionHandler(); private static String getTopicErrorMsg( String topic, String operation, String schema, String reason) { return String.format( "Failed to operate topic(s)%s operation [%s] under schema [%s], reason [%s]", topic, operation, schema, reason); } @Override public Response handle(OperationType op, String topic, String schema, Exception e) { String formatted = StringUtil.isBlank(topic) ? 
"" : " [" + topic + "]"; String errorMsg = getTopicErrorMsg(formatted, op.name(), schema, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof TopicAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, topic, schema, e); } } } private static class UserPermissionOperationExceptionHandler extends BasePermissionExceptionHandler { private static final ExceptionHandler INSTANCE = new UserPermissionOperationExceptionHandler(); @Override protected String getPermissionErrorMsg( String role, String operation, String parent, String reason) { return String.format( "Failed to operate role(s)%s operation [%s] under user [%s], reason [%s]", role, operation, parent, reason); } } private static class GroupPermissionOperationExceptionHandler extends BasePermissionExceptionHandler { private static final ExceptionHandler INSTANCE = new GroupPermissionOperationExceptionHandler(); @Override protected String getPermissionErrorMsg( String roles, String operation, String parent, String reason) { return String.format( "Failed to operate role(s)%s operation [%s] under group [%s], reason [%s]", roles, operation, parent, reason); } } private static class RolePermissionOperationHandler extends BasePermissionExceptionHandler { private static final ExceptionHandler INSTANCE = new RolePermissionOperationHandler(); @Override protected String getPermissionErrorMsg( String object, String operation, String parent, String reason) { return String.format( "Failed to operate object(%s) operation [%s] under role [%s], reason [%s]", object, operation, parent, reason); } } private abstract static class 
BasePermissionExceptionHandler extends BaseExceptionHandler { protected abstract String getPermissionErrorMsg( String role, String operation, String parent, String reason); @Override public Response handle(OperationType op, String roles, String parent, Exception e) { String formatted = StringUtil.isBlank(roles) ? "" : " [" + roles + "]"; String errorMsg = getPermissionErrorMsg(formatted, op.name(), parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, roles, parent, e); } } } private static class CredentialExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new CredentialExceptionHandler(); private static String getCredentialErrorMsg(String parent, String reason) { return String.format( "Failed to get credentials under object [%s], reason [%s]", parent, reason); } @Override public Response handle(OperationType op, String credential, String parent, Exception e) { String errorMsg = getCredentialErrorMsg(parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, credential, parent, e); } } } private static class TagExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new TagExceptionHandler(); private static String getTagErrorMsg( String tag, String operation, String parent, String reason) { return String.format( "Failed to operate tag(s)%s operation [%s] under object [%s], reason [%s]", tag, operation, parent, reason); } @Override public 
Response handle(OperationType op, String tag, String parent, Exception e) { String formatted = StringUtil.isBlank(tag) ? "" : " [" + tag + "]"; String errorMsg = getTagErrorMsg(formatted, op.name(), parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof TagAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof TagAlreadyAssociatedException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, tag, parent, e); } } } private static class PolicyExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new PolicyExceptionHandler(); private static String getPolicyErrorMsg( String policy, String operation, String parent, String reason) { return String.format( "Failed to operate policy(s)%s operation [%s] under object [%s], reason [%s]", policy, operation, parent, reason); } @Override public Response handle(OperationType op, String policy, String parent, Exception e) { String formatted = StringUtil.isBlank(policy) ? 
"" : " [" + policy + "]"; String errorMsg = getPolicyErrorMsg(formatted, op.name(), parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof PolicyAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof PolicyAlreadyAssociatedException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, policy, parent, e); } } } private static class OwnerExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new OwnerExceptionHandler(); private static String getOwnerErrorMsg( String name, String operation, String metalake, String reason) { return String.format( "Failed to execute %s owner operation [%s] under metalake [%s], reason [%s]", name, operation, metalake, reason); } @Override public Response handle(OperationType op, String name, String parent, Exception e) { String formatted = StringUtil.isBlank(name) ? 
"" : " [" + name + "]"; String errorMsg = getOwnerErrorMsg(formatted, op.name(), parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, name, parent, e); } } } private static class ModelExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new ModelExceptionHandler(); private static String getModelErrorMsg( String model, String operation, String schema, String reason) { return String.format( "Failed to operate model(s)%s operation [%s] under schema [%s], reason [%s]", model, operation, schema, reason); } @Override public Response handle(OperationType op, String model, String schema, Exception e) { String formatted = StringUtil.isBlank(model) ? "" : " [" + model + "]"; String errorMsg = getModelErrorMsg(formatted, op.name(), schema, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof ModelAlreadyExistsException || e instanceof ModelVersionAliasesAlreadyExistException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else { return super.handle(op, model, schema, e); } } } private static class JobTemplateExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new JobTemplateExceptionHandler(); private static String getJobTemplateErrorMsg( String jobTemplate, String operation, String parent, String reason) { return String.format( "Failed to operate job template(s)%s operation 
[%s] under object [%s], reason [%s]", jobTemplate, operation, parent, reason); } @Override public Response handle(OperationType op, String jobTemplate, String parent, Exception e) { String formatted = StringUtil.isBlank(jobTemplate) ? "" : " [" + jobTemplate + "]"; String errorMsg = getJobTemplateErrorMsg(formatted, op.name(), parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof JobTemplateAlreadyExistsException) { return Utils.alreadyExists(errorMsg, e); } else if (e instanceof InUseException) { return Utils.inUse(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else { return super.handle(op, jobTemplate, parent, e); } } } private static class JobExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new JobExceptionHandler(); private static String getJobErrorMsg( String jobTemplate, String operation, String parent, String reason) { return String.format( "Failed to operate job(s)%s operation [%s] under object [%s], reason [%s]", jobTemplate, operation, parent, reason); } @Override public Response handle(OperationType op, String jobTemplate, String parent, Exception e) { String formatted = StringUtil.isBlank(jobTemplate) ? 
"" : " [" + jobTemplate + "]"; String errorMsg = getJobErrorMsg(formatted, op.name(), parent, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof NotInUseException) { return Utils.notInUse(errorMsg, e); } else if (e instanceof ForbiddenException) { return Utils.forbidden(errorMsg, e); } else { return super.handle(op, jobTemplate, parent, e); } } } private static class StatisticExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new StatisticExceptionHandler(); private static String getStatisticErrorMsg( String name, String operation, String parent, String reason) { return String.format( "Failed to operate statistic(s)%s operation [%s] on parent [%s], reason [%s]", name, operation, parent, reason); } @Override public Response handle(OperationType op, String name, String object, Exception e) { String formatted = StringUtil.isBlank(name) ? 
"" : " [" + name + "]"; String errorMsg = getStatisticErrorMsg(formatted, op.name(), object, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof UnsupportedOperationException) { return Utils.unsupportedOperation(errorMsg, e); } else { return super.handle(op, name, object, e); } } } private static class PartitionStatsExceptionHandler extends BaseExceptionHandler { private static final ExceptionHandler INSTANCE = new PartitionStatsExceptionHandler(); private static String getPartitionStatsErrorMsg( String partition, String operation, String table, String reason) { return String.format( "Failed to operate partition stats%s operation [%s] of table [%s], reason [%s]", partition, operation, table, reason); } @Override public Response handle(OperationType op, String partition, String table, Exception e) { String formatted = StringUtil.isBlank(partition) ? 
"" : " [" + partition + "]"; String errorMsg = getPartitionStatsErrorMsg(formatted, op.name(), table, getErrorMsg(e)); LOG.warn(errorMsg, e); if (e instanceof IllegalArgumentException) { return Utils.illegalArguments(errorMsg, e); } else if (e instanceof NotFoundException) { return Utils.notFound(errorMsg, e); } else if (e instanceof UnsupportedOperationException) { return Utils.unsupportedOperation(errorMsg, e); } else { return super.handle(op, partition, table, e); } } } @VisibleForTesting static class BaseExceptionHandler extends ExceptionHandler { private static final String EXCEPTION_KEYWORD = "Exception: "; private static String getBaseErrorMsg( String object, String operation, String parent, String reason) { return String.format( "Failed to operate object%s operation [%s]%s, reason [%s]", object, operation, parent, reason); } @Override public Response handle(OperationType op, String object, String parent, Exception e) { String formattedObject = StringUtil.isBlank(object) ? "" : " [" + object + "]"; String formattedParent = StringUtil.isBlank(parent) ? "" : " under [" + parent + "]"; String errorMsg = getBaseErrorMsg(formattedObject, op.name(), formattedParent, getErrorMsg(e)); LOG.error(errorMsg, e); return Utils.internalError(errorMsg, e); } @VisibleForTesting static String getErrorMsg(Throwable throwable) { if (throwable == null || throwable.getMessage() == null) { return ""; } String message = throwable.getMessage(); int pos = message.lastIndexOf(EXCEPTION_KEYWORD); if (pos == -1) { return message; } else { return message.substring(pos + EXCEPTION_KEYWORD.length()); } } } }
googleapis/google-cloud-java
37,843
java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/ListAttackPathsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v2/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v2; /** * * * <pre> * Request message for listing the attack paths for a given simulation or valued * resource. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.ListAttackPathsRequest} */ public final class ListAttackPathsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.ListAttackPathsRequest) ListAttackPathsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListAttackPathsRequest.newBuilder() to construct. 
private ListAttackPathsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListAttackPathsRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListAttackPathsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListAttackPathsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListAttackPathsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.ListAttackPathsRequest.class, com.google.cloud.securitycenter.v2.ListAttackPathsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * The filter expression that filters the attack path in the response. 
* Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 4; private int pageSize_ = 0; /** * * * <pre> * The maximum number of results to return in a single response. Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (pageSize_ != 0) { output.writeInt32(4, pageSize_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, pageSize_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v2.ListAttackPathsRequest)) { return super.equals(obj); } com.google.cloud.securitycenter.v2.ListAttackPathsRequest other = (com.google.cloud.securitycenter.v2.ListAttackPathsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( java.nio.ByteBuffer 
data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.v2.ListAttackPathsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for listing the attack paths for a given simulation or valued * resource. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.ListAttackPathsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.ListAttackPathsRequest) com.google.cloud.securitycenter.v2.ListAttackPathsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListAttackPathsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListAttackPathsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.ListAttackPathsRequest.class, com.google.cloud.securitycenter.v2.ListAttackPathsRequest.Builder.class); } // Construct using com.google.cloud.securitycenter.v2.ListAttackPathsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageToken_ = ""; pageSize_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListAttackPathsRequest_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v2.ListAttackPathsRequest getDefaultInstanceForType() { return com.google.cloud.securitycenter.v2.ListAttackPathsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v2.ListAttackPathsRequest build() { 
com.google.cloud.securitycenter.v2.ListAttackPathsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v2.ListAttackPathsRequest buildPartial() { com.google.cloud.securitycenter.v2.ListAttackPathsRequest result = new com.google.cloud.securitycenter.v2.ListAttackPathsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securitycenter.v2.ListAttackPathsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageSize_ = pageSize_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v2.ListAttackPathsRequest) { return 
mergeFrom((com.google.cloud.securitycenter.v2.ListAttackPathsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v2.ListAttackPathsRequest other) { if (other == com.google.cloud.securitycenter.v2.ListAttackPathsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; 
/** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Name of parent to list attack paths. 
* * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Name of parent to list attack paths. * * Valid formats: * `organizations/{organization}`, * `organizations/{organization}/simulations/{simulation}` * `organizations/{organization}/simulations/{simulation}/attackExposureResults/{attack_exposure_result_v2}` * `organizations/{organization}/simulations/{simulation}/valuedResources/{valued_resource}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The filter expression that filters the attack path in the response. * Supported fields: * * * `valued_resources` supports = * </pre> * * <code>string filter = 2;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The value returned by the last `ListAttackPathsResponse`; indicates * that this is a continuation of a prior `ListAttackPaths` call, and * that the system should return the next page of data. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of results to return in a single response. 
Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of results to return in a single response. Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The maximum number of results to return in a single response. Default is * 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 4;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000008); pageSize_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.ListAttackPathsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.ListAttackPathsRequest) private static final com.google.cloud.securitycenter.v2.ListAttackPathsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.ListAttackPathsRequest(); } public static com.google.cloud.securitycenter.v2.ListAttackPathsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListAttackPathsRequest> PARSER = new com.google.protobuf.AbstractParser<ListAttackPathsRequest>() { @java.lang.Override public ListAttackPathsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListAttackPathsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListAttackPathsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v2.ListAttackPathsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/jena
37,988
jena-base/src/main/java/org/apache/jena/atlas/iterator/Iter.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.atlas.iterator; import java.io.PrintStream; import java.util.*; import java.util.function.*; import java.util.stream.Collector; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.apache.jena.atlas.io.IO; import org.apache.jena.atlas.lib.Closeable; import org.apache.jena.atlas.lib.Sink; /** * Iter provides general utilities for working with {@linkplain Iterator Iterators}. * This class provides functionality similar to {@code Stream} * except for iterators (and hence single threaded). * <p> * Style 1: functional style using statics. * * <pre> * import static org.apache.jena.atlas.iterator.Iter.*; * * filter(map(iterator, function), predicate) * </pre> * * Style 2: Stream-like: The class {@code Iter} provides methods to call on an iterator. * * <pre> * import static org.apache.jena.atlas.iterator.Iter.iter; * * iter(iterator).map(...).filter(...) * </pre> * * The operations attempt to close iterators - see {@link Iter#close(Iterator)}. * Caution - not all Iterators in this package provide close or passdown close operations * {@link IteratorOnClose} adds a {@link Runnable} action called once on close. 
* <p> * Iterators do not guarantee {@code .remove}. * * @param <T> the type of element over which an instance of {@code Iter} iterates, */ public class Iter<T> implements IteratorCloseable<T> { /** Shorter form of "forEachRemaining" */ public static <T> void forEach(Iterator<T> iter, Consumer<T> action) { iter.forEachRemaining(action); } public static <T> Stream<T> asStream(Iterator<T> iterator) { return asStream(iterator, false); } public static <T> Stream<T> asStream(Iterator<T> iterator, boolean parallel) { int characteristics = Spliterator.IMMUTABLE; Stream<T> stream1 = StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, characteristics), parallel); Stream<T> stream2 = stream1.onClose(()->close(iterator)); return stream2; } // ---- Special iterators. /** @deprecated Use {@link #singletonIterator} */ @Deprecated public static <T> Iterator<T> singleton(T item) { return singletonIterator(item); } public static <T> Iterator<T> singletonIterator(T item) { // There is a singleton iterator in Collections but it is not public. return new SingletonIterator<>(item); } public static <T> Iterator<T> nullIterator() { return Collections.emptyIterator(); } // -- Stream inspired names public static <T> Iter<T> empty() { return Iter.iter(Collections.emptyIterator()); } public static <T> Iter<T> of(T item) { return Iter.iter(singletonIterator(item)); } @SafeVarargs public static <T> Iter<T> of(T ...items) { return Iter.iter(List.of(items).iterator()); } public static<T> Iter<T> ofNullable(T t) { return t == null ? Iter.empty() : Iter.of(t); } /** * Return an iterator that does not permit remove. * This makes an "UnmodifiableIterator". */ public static <T> Iterator<T> noRemove(Iterator<T> iter) { return new IteratorWrapper<T>(iter) { @Override public final void remove() { throw new UnsupportedOperationException("remove"); } }; } // ---- Collectors. /** Collect an iterator into a set. */ public static <T> Set<T> toSet(Iterator<? 
extends T> stream) { return collect(stream, Collectors.toSet()); } /** Collect an iterator into a list. */ public static <T> List<T> toList(Iterator<? extends T> stream) { return collect(stream, Collectors.toList()); } /** * Create another iterator without risk of concurrent modification * exceptions. This materializes the input iterator. */ public static <T> Iterator<T> iterator(Iterator<? extends T> iterator) { List<T> x = Iter.toList(iterator); return x.iterator(); } // Note fold-left and fold-right // http://en.wikipedia.org/wiki/Fold_%28higher-order_function%29 // This reduce is a kind of fold-left (take first element, apply to rest of list) // and can deal with lists of zero elements. // -- Operations on iterators. @FunctionalInterface public interface Folder<X, Y> extends BiFunction<Y,X,Y> {} public static <T, R> R foldLeft(Iterator<? extends T> stream, R value, Folder<T, R> function) { // Tail recursion, unwound for (; stream.hasNext();) { T item = stream.next(); value = function.apply(value, item); } return value; } public static <T, R> R foldRight(Iterator<? extends T> stream, R value, Folder<T, R> function) { // Recursive. if ( !stream.hasNext() ) return value; T item = stream.next(); return function.apply(foldRight(stream, value, function), item); } public static <T> Optional<T> reduce(Iterator<T> iter, BinaryOperator<T> accumulator) { T r = reduce(iter, null, accumulator); return Optional.ofNullable(r); } public static <T> T reduce(Iterator<T> iter, T identity, BinaryOperator<T> accumulator) { T result = identity; while(iter.hasNext()) { T elt = iter.next(); result = (result == null) ? 
elt : accumulator.apply(result, elt); } return result; } // ---- min and max public static <T> Optional<T> min(Iterator<T> iter, Comparator<T> comparator) { T x = null; while(iter.hasNext()) { T elt = iter.next(); if ( x == null ) x = elt; else { int cmp = comparator.compare(x, elt); if ( cmp > 0 ) x = elt; } } return Optional.ofNullable(x); } public static <T> Optional<T> max(Iterator<T> iter, Comparator<T> comparator) { // Or min(iter, comparator.reversed()) T x = null; while(iter.hasNext()) { T elt = iter.next(); if ( x == null ) x = elt; else { int cmp = comparator.compare(x, elt); if ( cmp < 0 ) x = elt; } } return Optional.ofNullable(x); } // ---- collect /** See {@link Stream#collect(Collector)} */ public static <T, R, A> R collect(Iterator<T> iter, Collector<? super T, A, R> collector) { A a = collect(iter, collector.supplier(), collector.accumulator()); return collector.finisher().apply(a); } /** See {@link Stream#collect(Supplier, BiConsumer, BiConsumer)}, except without the {@code BiConsumer<R, R> combiner} */ public static <T,R> R collect(Iterator<T> iter, Supplier<R> supplier, BiConsumer<R, ? super T> accumulator) { R result = supplier.get(); apply(iter, elt -> accumulator.accept(result, elt)); return result; } /** Act on elements of an iterator. * @see #map(Iterator, Function) */ public static <T> void apply(Iterator<? extends T> iter, Consumer<T> action) { try { iter.forEachRemaining(action); } finally { Iter.close(iter); } } // ---- Filter public static <T> Iterator<T> filter(final Iterator<? extends T> stream, final Predicate<T> filter) { return new IterFiltered<T>(stream, filter); } public static <T> Iterator<T> filterDrop(final Iterator<? extends T> stream, final Predicate<T> filter) { return filter(stream, filter.negate()); } /** @deprecated Use {@link #filterDrop} */ @Deprecated public static <T> Iterator<T> notFilter(final Iterator<? 
extends T> stream, final Predicate<T> filter) { return filterDrop(stream, filter); } // Filter-related private static final class IterFiltered<T> implements IteratorCloseable<T> { private final Iterator<? extends T> stream; private final Predicate<T> filter; private boolean finished = false; private boolean slotOccupied = false; private T slot; private IterFiltered(Iterator<? extends T> stream, Predicate<T> filter) { this.stream = stream; this.filter = filter; } @Override public boolean hasNext() { if ( finished ) return false; while (!slotOccupied) { if ( !stream.hasNext() ) { closeIterator(); break; } T nextItem = stream.next(); if ( filter.test(nextItem) ) { slot = nextItem; slotOccupied = true; break; } } return slotOccupied; } @Override public T next() { if ( hasNext() ) { slotOccupied = false; return slot; } closeIterator(); throw new NoSuchElementException("filter.next"); } @Override public void forEachRemaining(Consumer<? super T> action) { if ( finished ) return; if ( slotOccupied ) { action.accept(slot); } T t; while (stream.hasNext()) { t = stream.next(); if ( filter.test(t) ) action.accept(t); } slotOccupied = false; } private void closeIterator() { if ( finished ) return; finished = true; Iter.close(stream); } @Override public void close() { closeIterator(); } } /** * Return true if every element of stream passes the filter (reads the * stream until the first element not passing the filter) */ public static <T> boolean allMatch(Iterator<T> iter, Predicate<? super T> predicate) { try { while (iter.hasNext()) { T item = iter.next(); if ( !predicate.test(item) ) { Iter.close(iter); return false; } } return true; } finally { Iter.close(iter); } } /** * Return true if one or more elements of stream passes the filter (reads the * stream to first element passing the filter) */ public static <T> boolean anyMatch(Iterator<T> iter, Predicate<? 
super T> predicate) { try { while (iter.hasNext()) { T item = iter.next(); if ( predicate.test(item) ) { Iter.close(iter); return true; } } return false; } finally { Iter.close(iter); } } /** * Return true if none of the elements of the iterator passes the predicate test reads * the stream to first element passing the filter) */ public static <T> boolean noneMatch(Iterator<T> iter, Predicate<? super T> predicate) { return ! anyMatch(iter, predicate); } /** * Return an Optional with the first element of an iterator that matches the predicate. * Return {@code Optional.empty} if none match. * Reads the iterator until the first match. */ public static <T> Optional<T> findFirst(Iterator<T> iter, Predicate<? super T> predicate) { while ( iter.hasNext() ) { T item = iter.next(); if ( predicate.test(item) ) return Optional.of(item); } return Optional.empty(); } /** * Return an Optional with the last element of an iterator that matches the predicate. * Return {@code Optional.empty} if no match. * Reads the iterator to completion. */ public static <T> Optional<T> findLast(Iterator<T> iter, Predicate<? super T> predicate) { T thing = null; while ( iter.hasNext() ) { T item = iter.next(); if ( predicate.test(item) ) thing = item; } return Optional.ofNullable(thing); } /** * Return an Optional with an element of an iterator that matches the predicate. * Return {@code Optional.empty} if none match. * The element returned is not specified by the API contract. */ public static <T> Optional<T> findAny(Iterator<T> iter, Predicate<? super T> predicate) { return findFirst(iter, predicate); } // ---- Map /** * Apply a function to every element of an iterator, transforming it * from a {@code T} to an {@code R}. */ public static <T, R> Iterator<R> map(Iterator<? extends T> stream, Function<T, R> converter) { return new IterMap<>(stream, converter); } private static final class IterMap<T,R> implements IteratorCloseable<R> { private final Iterator<? 
extends T> stream; private final Function<T, R> converter; private IterMap(Iterator<? extends T> stream, Function<T, R> converter) { this.stream = stream; this.converter = converter; } @Override public boolean hasNext() { return stream.hasNext(); } @Override public R next() { return converter.apply(stream.next()); } @Override public void forEachRemaining(Consumer<? super R> action) { stream.forEachRemaining(item->action.accept(converter.apply(item))); } @Override public void close() { Iter.close(stream); } } /** * Apply a function to every element of an iterator, to produce possibly multiple mapping each time. * See {@link Stream#flatMap} */ public static <T, R> Iterator<R> flatMap(Iterator<T> iter, Function<T, Iterator<R>> mapper) { return new IteratorFlatMap<>(iter, mapper); } /** * Apply an action to everything in stream, yielding a stream of the * same items. */ public static <T> Iterator<T> operate(final Iterator<? extends T> stream, final Consumer<T> action) { final Iterator<T> iter = new IterOperate<T>(stream, action); return iter; } private static final class IterOperate<T> implements IteratorCloseable<T> { private final Iterator<? extends T> stream; private final Consumer<T> action; private IterOperate(Iterator<? extends T> stream, Consumer<T> action) { this.stream = stream; this.action = action; } @Override public boolean hasNext() { return stream.hasNext(); } @Override public T next() { T t = stream.next(); action.accept(t); return t; } @Override public void forEachRemaining(Consumer<? super T> action) { stream.forEachRemaining(item->{ this.action.accept(item); action.accept(item); }); } @Override public void close() { Iter.close(stream); } } /** Print an iterator as it gets used - this adds a printing wrapper */ public static <T> Iterator<T> printWrapper(final Iterator<? 
extends T> stream) { return Iter.printWrapper(System.out, stream); } /** Print an iterator as it gets used - this adds a printing wrapper */ public static <T> Iterator<T> printWrapper(final PrintStream out, final Iterator<? extends T> stream) { return Iter.operate(stream, out::println); } /** Join two iterators. * If there, potentially, going to be many iterators, it is better to * create an {@link IteratorConcat} explicitly and add each iterator. */ public static <T> Iterator<T> append(Iterator<? extends T> iter1, Iterator<? extends T> iter2) { return IteratorCons.create(iter1, iter2); } /** Return an iterator that will see each element of the underlying iterator only once. * Note that this need working memory to remember the elements already seen. */ public static <T> Iterator<T> distinct(Iterator<T> iter) { return filter(iter, new FilterUnique<T>()); } /** Remove adjacent duplicates. This operation does not need * working memory to remember the all elements already seen, * just a slot for the last element seen. */ public static <T> Iterator<T> distinctAdjacent(Iterator<T> iter) { return filter(iter, new FilterDistinctAdjacent<T>()); } /** Remove nulls from an iterator */ public static <T> Iterator<T> removeNulls(Iterator<T> iter) { return filter(iter, Objects::nonNull); } /** Step forward up to {@code steps} places. * <br/>Return number of steps taken. * * @apiNote * The iterator is moved at most {@code steps} places with no overshoot. * The iterator can be used afterwards. */ public static int step(Iterator<?> iter, int steps) { for ( int i = 0; i < steps; i++) { if ( ! 
iter.hasNext() ) return i; iter.next(); } return steps; } /** Take the first N elements of an iterator - stop early if too few * @see #limit(Iterator, long) */ public static <T> List<T> take(Iterator<T> iter, int N) { iter = new IterLimit<>(iter, N); List<T> x = new ArrayList<>(N); while ( iter.hasNext() ) x.add(iter.next()); return x; } /** Create an iterator such that it yields elements while a predicate test on * the elements is true, end the iteration. * @see Iter#filter(Iterator, Predicate) */ public static <T> Iterator<T> takeWhile(Iterator<T> iter, Predicate<T> predicate) { return new IteratorTruncate<>(iter, predicate); } /** * Create an iterator such that it yields elements until a predicate test on * the elements becomes true, end the iteration. * * @see Iter#filter(Iterator, Predicate) */ public static <T> Iterator<T> takeUntil(Iterator<T> iter, Predicate<T> predicate) { return new IteratorTruncate<>(iter, predicate.negate()); } /** Create an iterator such that elements from the front while * a predicate test become true are dropped then return all remaining elements * are iterated over. * The first element where the predicted becomes true is the first element of the * returned iterator. */ public static <T> Iterator<T> dropWhile(Iterator<T> iter, Predicate<T> predicate) { PeekIterator<T> iter2 = new PeekIterator<>(iter); for(;;) { T elt = iter2.peek(); if ( elt == null ) return Iter.nullIterator(); if ( ! predicate.test(elt) ) break; } return iter2; } /** Create an iterator such that elements from the front until * a predicate test become true are dropped then return all remaining elements * are iterated over. * The first element where the predicate becomes true is the first element of the * returned iterator. */ public static <T> Iterator<T> dropUntil(Iterator<T> iter, Predicate<T> predicate) { return dropWhile(iter, predicate.negate()); } /** Return an iterator that is limited to the given number of elements. 
* If it is shorter than the limit, stop at the end. * @see #take(Iterator, int) */ public static <X> Iterator<X> limit(Iterator<X> iterator, long limit) { return new IterLimit<>(iterator, limit); } private static class IterLimit<T> implements IteratorCloseable<T> { private long count = 0; private final long limit; private boolean closed = false; private final Iterator<? extends T> iterator; private IterLimit(Iterator<? extends T> stream, long limit) { this.iterator = stream; this.limit = limit; } @Override public boolean hasNext() { if ( count >= limit ) { closeOnce(); return false; } boolean b = iterator.hasNext(); if ( !b ) closeOnce(); return b; } @Override public T next() { try { T t = iterator.next(); count++; return t; } catch (NoSuchElementException ex) { closeOnce(); throw ex; } } private void closeOnce() { if ( ! closed ) Iter.close(iterator); closed = true; } @Override public void close() { closeOnce(); } @Override public void remove() { // But leave the count as-is. iterator.remove(); } } /** Skip over a number of elements of an iterator */ public static <X> Iterator<X> skip(Iterator<X> iterator, long limit) { for ( long i = 0; i < limit; i++ ) { if ( iterator.hasNext() ) iterator.next(); else // Now exhausted. break; } return iterator; } /** Count the iterator (this is destructive on the iterator) */ public static <T> long count(Iterator<T> iterator) { ActionCount<T> action = new ActionCount<>(); iterator.forEachRemaining(action); return action.getCount(); } /** Consume the iterator */ public static <T> void consume(Iterator<T> iterator) { iterator.forEachRemaining(x->{}); // Do nothing. } /** Create a string from an iterator, using the separator. Note: this consumes the iterator. */ public static <T> String asString(Iterator<T> stream, String sep) { return Iter.iter(stream).map(x->x.toString()).collect(Collectors.joining(sep)); } /** Create a string from an iterator, using the separator, prefix and suffix. Note: this consumes the iterator. 
*/ public static <T> String asString(Iterator<T> stream, CharSequence sep, CharSequence prefix, CharSequence suffix) { return Iter.iter(stream).map(x->x.toString()).collect(Collectors.joining(sep, prefix, suffix)); } /** Close iterator if marked with {@link Closeable}. */ public static <T> void close(Iterator<T> iter) { if ( iter instanceof Closeable cIter ) cIter.close(); } /** * Run an action when an iterator is closed. This assumes the iterator closed * with {@link Iter#close}. * <p> * Convenience function for closing a {@link java.io.Closeable} after use when * passing an iterator out of scope. */ public static <T> IteratorCloseable<T> onCloseIO(Iterator<T> iterator, java.io.Closeable ioThing) { return onClose(iterator, ()->IO.close(ioThing)); } /** * Run an action when an iterator is closed. * This assumes the iterator closed with {@link Iter#close}. */ public static <T> IteratorOnClose<T> onClose(Iterator<T> iter, Runnable closeHandler) { return IteratorOnClose.atEnd(iter, closeHandler); } /** * Print an iterator to stdout, return a copy of the iterator. Printing * occurs now. See {@link #debug} for an operation to print as the * iterator is used. */ public static <T> Iterator<T> log(Iterator<T> stream) { return log(System.out, stream); } /** * Print an iterator to stdout, return a copy of the iterator. Printing * occurs now. See {@link #debug} for an operation to print as the * iterator is used. */ public static <T> Iterator<T> log(final PrintStream out, Iterator<T> stream) { Iterator<T> iter = debug(out, stream); // And force it to run. if ( ! iter.hasNext() ) out.println("<empty>"); return Iter.toList(iter).iterator(); } /** * Print an iterator to stdout, return a copy of the iterator. Printing * occurs when the iterator is used. See {@link #log} for * an operation to print now. */ public static <T> Iterator<T> debug(Iterator<T> stream) { return debug(System.out, stream); } /** * Print an iterator, return a copy of the iterator. 
Printing * occurs as the returned iterator is used. */ public static <T> Iterator<T> debug(final PrintStream out, Iterator<T> stream) { try { return map(stream, item -> {out.println(item); return item;}); } finally { out.flush(); } } /** Print an iterator (destructive) */ public static <T> void print(Iterator<T> stream) { print(System.out, stream); } /** Print an iterator (destructive) */ public static <T> void print(PrintStream out, Iterator<T> stream) { stream.forEachRemaining(out::println); } /** Send the elements of the iterator to a sink - consumes the iterator */ public static <T> void sendToSink(Iterator<T> iter, Sink<T> sink) { iter.forEachRemaining(sink::send); sink.close(); } // ---- // Iter class part : factories public static <T> Iter<T> iter(Iter<T> iter) { return iter; } public static <T> Iter<T> iter(Collection<T> collection) { return iter(collection.iterator()); } public static <T> Iter<T> iter(Iterator<T> iterator) { Objects.requireNonNull(iterator); if ( iterator instanceof Iter<T> iter ) return iter; return new Iter<>(iterator); } public static <T> Iter<T> singletonIter(T item) { return iter(singletonIterator(item)); } public static <T> Iter<T> nullIter() { return iter(nullIterator()); } /** * Materialize an iterator, that is, force it to run now - useful in * debugging or when iteration may modify underlying datastructures. */ public static <T> Iterator<T> materialize(Iterator<T> iter) { return toList(iter).iterator(); } /** An {@code Iter} of 2 {@code Iter}'s */ public static <T> Iter<T> concat(Iter<T> iter1, Iter<T> iter2) { if ( iter1 == null ) return iter2; if ( iter2 == null ) return iter1; return iter1.append(iter2); } /** An {@code Iterator} of 2 {@code Iterator}'s. * See also {@link IteratorConcat}. 
*/ public static <T> Iter<T> concat(Iterator<T> iter1, Iterator<T> iter2) { if ( iter1 == null ) return iter(iter2); if ( iter2 == null ) return iter(iter1); return iter(iter1).append(iter(iter2)); } /** Return the first element of an iterator or null if no such element. * @param iter * @return An item or null. */ public static <T> T first(Iterator<T> iter) { return first(iter, (x)-> true ); } /** Skip to the first element meeting a condition and return that element. */ public static <T> T first(Iterator<T> iter, Predicate<T> filter) { while (iter.hasNext()) { T t = iter.next(); if ( filter.test(t) ) return t; } return null; } /** Skip to the first element meeting a condition and return that element's index (zero-based). */ public static <T> int firstIndex(Iterator<T> iter, Predicate<T> filter) { for (int idx = 0; iter.hasNext(); idx++) { T t = iter.next(); if ( filter.test(t) ) return idx; } return -1; } /** Return the last element or null, if no elements. This operation consumes the iterator. */ public static <T> T last(Iterator<T> iter) { return last(iter, (x)->true); } /** Return the last element satisfying a predicate. This operation consumes the whole iterator. */ public static <T> T last(Iterator<T> iter, Predicate<? super T> filter) { T thing = null; while (iter.hasNext()) { T t = iter.next(); if ( filter.test(t) ) thing = t; } return thing; } /** Find the last occurrence, defined by a predicate, in a list. */ public static <T> int lastIndex(List<T> list, Predicate<? super T> filter) { for (int idx = list.size()-1; idx >= 0; idx--) { T t = list.get(idx); if ( filter.test(t) ) return idx; } return -1; } /** reverse iterator for a list */ public static <T> Iterator<T> reverseIterate(List<T> list) { ListIterator<T> iter = list.listIterator(list.size()); return new Iterator<>() { @Override public boolean hasNext() { return iter.hasPrevious(); } @Override public T next() { return iter.previous(); } }; } /** reverse for each on a list. 
*/ public static <T> void reverseIterate(List<T> list, Consumer<? super T> action) { ListIterator<T> iter = list.listIterator(list.size()); while(iter.hasPrevious()) { T t = iter.previous(); action.accept(t); } } // ------------------------------------------------------ // The class. private Iterator<T> iterator; private Iter(Iterator<T> iterator) { this.iterator = iterator; } @Override public void close() { Iter.close(iterator); } /** Apply the Consumer to each element of the iterator */ public void forEach(Consumer<T> action) { iterator.forEachRemaining(action); } @Override public void forEachRemaining(Consumer<? super T> action) { iterator.forEachRemaining(action); } /** Consume the {@code Iter} and produce a {@code Set} */ public Set<T> toSet() { return toSet(iterator); } /** Consume the {@code Iter} and produce a {@code List} */ public List<T> toList() { return toList(iterator); } public void sendToSink(Sink<T> sink) { sendToSink(iterator, sink); } public T first() { return first(iterator); } public T last() { return last(iterator); } /** Skip to the first element meeting a condition and return that element. */ public T first(Predicate<T> filter) { return first(iterator, filter); } /** Skip to the first element meeting a condition and return that element's index (zero-based). */ public int firstIndex(Predicate<T> filter) { return firstIndex(iterator, filter); } /** Filter by predicate */ public Iter<T> filter(Predicate<T> filter) { return iter(filter(iterator, filter)); } public boolean allMatch(Predicate<? super T> predicate) { return allMatch(iterator, predicate); } public boolean anyMatch(Predicate<? super T> predicate) { return anyMatch(iterator, predicate); } public boolean noneMatch(Predicate<? super T> predicate) { return noneMatch(iterator, predicate); } public Optional<T> findFirst(Predicate<? super T> predicate) { return findFirst(iterator, predicate); } public Optional<T> findAny(Predicate<? 
super T> predicate) { return findAny(iterator, predicate); } public Optional<T> findLast(Predicate<? super T> predicate) { return findLast(iterator, predicate); } /** Remove nulls */ public Iter<T> removeNulls() { return iter(removeNulls(this)); } /** Map each element using given function */ public <R> Iter<R> map(Function<T, R> converter) { return iter(map(iterator, converter)); } /** FlatMap each element using given function of element to iterator of mapped elements.s */ public <R> Iter<R> flatMap(Function<T, Iterator<R>> converter) { return iter(flatMap(iterator, converter)); } /** * Apply an action to everything in the stream, yielding a stream of the * original items. */ public Iter<T> operate(Consumer<T> action) { return iter(operate(iterator, action)); } public <R> R foldLeft(R initial, Folder<T, R> accumulator) { return foldLeft(iterator, initial, accumulator); } public <R> R foldRight(R initial, Folder<T, R> accumulator) { return foldRight(iterator, initial, accumulator); } /** Reduce. * This reduce is fold-left (take first element, apply to rest of list) */ public Optional<T> reduce(BinaryOperator<T> accumulator) { return reduce(iterator, accumulator); } public T reduce(T identity, BinaryOperator<T> accumulator) { return reduce(iterator, identity, accumulator); } public Optional<T> min(Comparator<T> comparator) { return min(iterator, comparator); } public Optional<T> max(Comparator<T> comparator) { return max(iterator, comparator); } /** See {@link Stream#collect(Supplier, BiConsumer, BiConsumer)}, except without the {@code BiConsumer<R, R> combiner} */ public <R> R collect(Supplier<R> supplier, BiConsumer<R, T> accumulator/*, BiConsumer<R, R> combiner*/) { return collect(iterator, supplier, accumulator); } /** See {@link Stream#collect(Collector)} */ public <R, A> R collect(Collector<? 
super T, A, R> collector) { return collect(iterator, collector); } /** Apply an action to every element of an iterator */ public void apply(Consumer<T> action) { iterator.forEachRemaining(action); } /** Join on an {@code Iterator}.. * If there are going to be many iterators, it is better to create an {@link IteratorConcat} * and {@code .add} each iterator. The overheads are much lower. */ public Iter<T> append(Iterator<T> iter) { return iter(IteratorCons.create(iterator, iter)); } /** Return an Iter that yields at most the first N items */ public Iter<T> take(int N) { return iter(take(iterator, N)); } /** Create an {@code Iter} such that it yields elements while a predicate test on * the elements is true, end the iteration. * @see Iter#filter(Predicate) */ public Iter<T> takeWhile(Predicate<T> predicate) { return iter(takeWhile(iterator, predicate)); } /** * Create an {@code Iter} such that it yields elements until a predicate test on * the elements becomes true, end the iteration. * * @see Iter#filter(Predicate) */ public Iter<T> takeUntil(Predicate<T> predicate) { return iter(takeUntil(iterator, predicate)); } /** Create an {@code Iter} such that elements from the front while * a predicate test become true are dropped then return all remaining elements * are iterated over. * The first element where the predicted becomes true is the first element of the * returned iterator. */ public Iter<T> dropWhile(Predicate<T> predicate) { return iter(dropWhile(iterator, predicate)); } /** Create an {@code Iter} such that elements from the front until * a predicate test become true are dropped then return all remaining elements * are iterated over. * The first element where the predicate becomes true is the first element of the * returned iterator. */ public Iter<T> dropUntil(Predicate<T> predicate) { return iter(dropWhile(iterator, predicate.negate())); } /** Limit the number of elements. 
*/ public Iter<T> limit(long N) { return Iter.iter(limit(null, N)); } /** Skip over a number of elements. */ public Iter<T> skip(long N) { return Iter.iter(skip(null, N)); } /** Count the iterator (this is destructive on the iterator) */ public long count() { ActionCount<T> action = new ActionCount<>(); this.forEachRemaining(action); return action.getCount(); } /** * Return an {@code Iter} that will see each element of the underlying iterator only once. * Note that this need working memory to remember the elements already seen. */ public Iter<T> distinct() { return iter((distinct(iterator))); } /** Remove adjacent duplicates. This operation does not need * working memory to remember the all elements already seen, * just a slot for the last element seen. */ public Iter<T> distinctAdjacent() { return iter(distinctAdjacent(iterator)); } // ---- Iterator @Override public boolean hasNext() { return iterator.hasNext(); } @Override public T next() { return iterator.next(); } @Override public void remove() { iterator.remove(); } }
googleapis/google-cloud-java
37,882
java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/CreateEntityTypeRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/entity_type.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2; /** * * * <pre> * The request message for * [EntityTypes.CreateEntityType][google.cloud.dialogflow.v2.EntityTypes.CreateEntityType]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.CreateEntityTypeRequest} */ public final class CreateEntityTypeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.CreateEntityTypeRequest) CreateEntityTypeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateEntityTypeRequest.newBuilder() to construct. 
private CreateEntityTypeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateEntityTypeRequest() { parent_ = ""; languageCode_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateEntityTypeRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EntityTypeProto .internal_static_google_cloud_dialogflow_v2_CreateEntityTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EntityTypeProto .internal_static_google_cloud_dialogflow_v2_CreateEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.class, com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ENTITY_TYPE_FIELD_NUMBER = 2; private com.google.cloud.dialogflow.v2.EntityType entityType_; /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entityType field is set. */ @java.lang.Override public boolean hasEntityType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entityType. */ @java.lang.Override public com.google.cloud.dialogflow.v2.EntityType getEntityType() { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2.EntityTypeOrBuilder getEntityTypeOrBuilder() { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } public static final int LANGUAGE_CODE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object languageCode_ = ""; /** * * * <pre> * Optional. The language used to access language-specific data. * If not specified, the agent's default language is used. 
* For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The languageCode. */ @java.lang.Override public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } } /** * * * <pre> * Optional. The language used to access language-specific data. * If not specified, the agent's default language is used. * For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for languageCode. 
*/ @java.lang.Override public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getEntityType()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, languageCode_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEntityType()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, languageCode_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.CreateEntityTypeRequest)) { return 
super.equals(obj); } com.google.cloud.dialogflow.v2.CreateEntityTypeRequest other = (com.google.cloud.dialogflow.v2.CreateEntityTypeRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasEntityType() != other.hasEntityType()) return false; if (hasEntityType()) { if (!getEntityType().equals(other.getEntityType())) return false; } if (!getLanguageCode().equals(other.getLanguageCode())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasEntityType()) { hash = (37 * hash) + ENTITY_TYPE_FIELD_NUMBER; hash = (53 * hash) + getEntityType().hashCode(); } hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER; hash = (53 * hash) + getLanguageCode().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException 
{ return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2.CreateEntityTypeRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [EntityTypes.CreateEntityType][google.cloud.dialogflow.v2.EntityTypes.CreateEntityType]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.CreateEntityTypeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.CreateEntityTypeRequest) com.google.cloud.dialogflow.v2.CreateEntityTypeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.EntityTypeProto .internal_static_google_cloud_dialogflow_v2_CreateEntityTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.EntityTypeProto .internal_static_google_cloud_dialogflow_v2_CreateEntityTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.class, com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.Builder.class); } // Construct using 
com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntityTypeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; entityType_ = null; if (entityTypeBuilder_ != null) { entityTypeBuilder_.dispose(); entityTypeBuilder_ = null; } languageCode_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.EntityTypeProto .internal_static_google_cloud_dialogflow_v2_CreateEntityTypeRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2.CreateEntityTypeRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2.CreateEntityTypeRequest build() { com.google.cloud.dialogflow.v2.CreateEntityTypeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2.CreateEntityTypeRequest buildPartial() { com.google.cloud.dialogflow.v2.CreateEntityTypeRequest result = new com.google.cloud.dialogflow.v2.CreateEntityTypeRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.v2.CreateEntityTypeRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.entityType_ = entityTypeBuilder_ == null ? 
entityType_ : entityTypeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.languageCode_ = languageCode_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.CreateEntityTypeRequest) { return mergeFrom((com.google.cloud.dialogflow.v2.CreateEntityTypeRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.CreateEntityTypeRequest other) { if (other == com.google.cloud.dialogflow.v2.CreateEntityTypeRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasEntityType()) { mergeEntityType(other.getEntityType()); } if (!other.getLanguageCode().isEmpty()) { languageCode_ = other.languageCode_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { 
return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getEntityTypeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { languageCode_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The agent to create a entity type for. * Format: `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.dialogflow.v2.EntityType entityType_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.EntityType, com.google.cloud.dialogflow.v2.EntityType.Builder, com.google.cloud.dialogflow.v2.EntityTypeOrBuilder> entityTypeBuilder_; /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entityType field is set. */ public boolean hasEntityType() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entityType. */ public com.google.cloud.dialogflow.v2.EntityType getEntityType() { if (entityTypeBuilder_ == null) { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } else { return entityTypeBuilder_.getMessage(); } } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntityType(com.google.cloud.dialogflow.v2.EntityType value) { if (entityTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entityType_ = value; } else { entityTypeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The entity type to create. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntityType( com.google.cloud.dialogflow.v2.EntityType.Builder builderForValue) { if (entityTypeBuilder_ == null) { entityType_ = builderForValue.build(); } else { entityTypeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEntityType(com.google.cloud.dialogflow.v2.EntityType value) { if (entityTypeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && entityType_ != null && entityType_ != com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance()) { getEntityTypeBuilder().mergeFrom(value); } else { entityType_ = value; } } else { entityTypeBuilder_.mergeFrom(value); } if (entityType_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEntityType() { bitField0_ = (bitField0_ & ~0x00000002); entityType_ = null; if (entityTypeBuilder_ != null) { entityTypeBuilder_.dispose(); entityTypeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2.EntityType.Builder getEntityTypeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEntityTypeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The entity type to create. 
* </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2.EntityTypeOrBuilder getEntityTypeOrBuilder() { if (entityTypeBuilder_ != null) { return entityTypeBuilder_.getMessageOrBuilder(); } else { return entityType_ == null ? com.google.cloud.dialogflow.v2.EntityType.getDefaultInstance() : entityType_; } } /** * * * <pre> * Required. The entity type to create. * </pre> * * <code> * .google.cloud.dialogflow.v2.EntityType entity_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.EntityType, com.google.cloud.dialogflow.v2.EntityType.Builder, com.google.cloud.dialogflow.v2.EntityTypeOrBuilder> getEntityTypeFieldBuilder() { if (entityTypeBuilder_ == null) { entityTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2.EntityType, com.google.cloud.dialogflow.v2.EntityType.Builder, com.google.cloud.dialogflow.v2.EntityTypeOrBuilder>( getEntityType(), getParentForChildren(), isClean()); entityType_ = null; } return entityTypeBuilder_; } private java.lang.Object languageCode_ = ""; /** * * * <pre> * Optional. The language used to access language-specific data. * If not specified, the agent's default language is used. * For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The languageCode. */ public java.lang.String getLanguageCode() { java.lang.Object ref = languageCode_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); languageCode_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. 
The language used to access language-specific data. * If not specified, the agent's default language is used. * For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for languageCode. */ public com.google.protobuf.ByteString getLanguageCodeBytes() { java.lang.Object ref = languageCode_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); languageCode_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The language used to access language-specific data. * If not specified, the agent's default language is used. * For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The languageCode to set. * @return This builder for chaining. */ public Builder setLanguageCode(java.lang.String value) { if (value == null) { throw new NullPointerException(); } languageCode_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The language used to access language-specific data. * If not specified, the agent's default language is used. * For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearLanguageCode() { languageCode_ = getDefaultInstance().getLanguageCode(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. 
The language used to access language-specific data. * If not specified, the agent's default language is used. * For more information, see * [Multilingual intent and entity * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity). * </pre> * * <code>string language_code = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for languageCode to set. * @return This builder for chaining. */ public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); languageCode_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.CreateEntityTypeRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.CreateEntityTypeRequest) private static final com.google.cloud.dialogflow.v2.CreateEntityTypeRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.CreateEntityTypeRequest(); } public static com.google.cloud.dialogflow.v2.CreateEntityTypeRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateEntityTypeRequest> PARSER = new com.google.protobuf.AbstractParser<CreateEntityTypeRequest>() { @java.lang.Override public CreateEntityTypeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateEntityTypeRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateEntityTypeRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2.CreateEntityTypeRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,885
java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/SearchBlueprintRevisionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/telcoautomation/v1/telcoautomation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.telcoautomation.v1; /** * * * <pre> * Response object for `SearchBlueprintRevisions`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse} */ public final class SearchBlueprintRevisionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) SearchBlueprintRevisionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use SearchBlueprintRevisionsResponse.newBuilder() to construct. 
private SearchBlueprintRevisionsResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchBlueprintRevisionsResponse() { blueprints_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchBlueprintRevisionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_SearchBlueprintRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_SearchBlueprintRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse.class, com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse.Builder.class); } public static final int BLUEPRINTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> blueprints_; /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> getBlueprintsList() { return blueprints_; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.telcoautomation.v1.BlueprintOrBuilder> getBlueprintsOrBuilderList() { return blueprints_; } /** * * * <pre> * The list of requested blueprint revisions. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ @java.lang.Override public int getBlueprintsCount() { return blueprints_.size(); } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ @java.lang.Override public com.google.cloud.telcoautomation.v1.Blueprint getBlueprints(int index) { return blueprints_.get(index); } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ @java.lang.Override public com.google.cloud.telcoautomation.v1.BlueprintOrBuilder getBlueprintsOrBuilder(int index) { return blueprints_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < blueprints_.size(); i++) { output.writeMessage(1, blueprints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < blueprints_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, blueprints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse)) { return super.equals(obj); } com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse other = (com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) obj; if (!getBlueprintsList().equals(other.getBlueprintsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getBlueprintsCount() > 0) { hash = (37 * hash) + BLUEPRINTS_FIELD_NUMBER; hash = (53 * hash) + getBlueprintsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response object for `SearchBlueprintRevisions`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_SearchBlueprintRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_SearchBlueprintRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse.class, com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse.Builder.class); } // Construct using // com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; if (blueprintsBuilder_ == null) { blueprints_ = java.util.Collections.emptyList(); } else { blueprints_ = null; blueprintsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_SearchBlueprintRevisionsResponse_descriptor; } @java.lang.Override public com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse getDefaultInstanceForType() { return com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse .getDefaultInstance(); } @java.lang.Override public com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse build() { com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse buildPartial() { com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse result = new com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse result) { if (blueprintsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { blueprints_ = java.util.Collections.unmodifiableList(blueprints_); bitField0_ = (bitField0_ & ~0x00000001); } result.blueprints_ = blueprints_; } else { result.blueprints_ = blueprintsBuilder_.build(); } } private void buildPartial0( com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { 
result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) { return mergeFrom( (com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse other) { if (other == com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse .getDefaultInstance()) return this; if (blueprintsBuilder_ == null) { if (!other.blueprints_.isEmpty()) { if (blueprints_.isEmpty()) { blueprints_ = other.blueprints_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureBlueprintsIsMutable(); blueprints_.addAll(other.blueprints_); } onChanged(); } } else { if (!other.blueprints_.isEmpty()) { if (blueprintsBuilder_.isEmpty()) { blueprintsBuilder_.dispose(); blueprintsBuilder_ = null; blueprints_ = other.blueprints_; bitField0_ = (bitField0_ & ~0x00000001); blueprintsBuilder_ = 
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getBlueprintsFieldBuilder() : null; } else { blueprintsBuilder_.addAllMessages(other.blueprints_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.telcoautomation.v1.Blueprint m = input.readMessage( com.google.cloud.telcoautomation.v1.Blueprint.parser(), extensionRegistry); if (blueprintsBuilder_ == null) { ensureBlueprintsIsMutable(); blueprints_.add(m); } else { blueprintsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> blueprints_ = java.util.Collections.emptyList(); private void ensureBlueprintsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { blueprints_ = new java.util.ArrayList<com.google.cloud.telcoautomation.v1.Blueprint>(blueprints_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.telcoautomation.v1.Blueprint, 
com.google.cloud.telcoautomation.v1.Blueprint.Builder, com.google.cloud.telcoautomation.v1.BlueprintOrBuilder> blueprintsBuilder_; /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> getBlueprintsList() { if (blueprintsBuilder_ == null) { return java.util.Collections.unmodifiableList(blueprints_); } else { return blueprintsBuilder_.getMessageList(); } } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public int getBlueprintsCount() { if (blueprintsBuilder_ == null) { return blueprints_.size(); } else { return blueprintsBuilder_.getCount(); } } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public com.google.cloud.telcoautomation.v1.Blueprint getBlueprints(int index) { if (blueprintsBuilder_ == null) { return blueprints_.get(index); } else { return blueprintsBuilder_.getMessage(index); } } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder setBlueprints(int index, com.google.cloud.telcoautomation.v1.Blueprint value) { if (blueprintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBlueprintsIsMutable(); blueprints_.set(index, value); onChanged(); } else { blueprintsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of requested blueprint revisions. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder setBlueprints( int index, com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) { if (blueprintsBuilder_ == null) { ensureBlueprintsIsMutable(); blueprints_.set(index, builderForValue.build()); onChanged(); } else { blueprintsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder addBlueprints(com.google.cloud.telcoautomation.v1.Blueprint value) { if (blueprintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBlueprintsIsMutable(); blueprints_.add(value); onChanged(); } else { blueprintsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder addBlueprints(int index, com.google.cloud.telcoautomation.v1.Blueprint value) { if (blueprintsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBlueprintsIsMutable(); blueprints_.add(index, value); onChanged(); } else { blueprintsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder addBlueprints( com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) { if (blueprintsBuilder_ == null) { ensureBlueprintsIsMutable(); blueprints_.add(builderForValue.build()); onChanged(); } else { blueprintsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of requested blueprint revisions. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder addBlueprints( int index, com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) { if (blueprintsBuilder_ == null) { ensureBlueprintsIsMutable(); blueprints_.add(index, builderForValue.build()); onChanged(); } else { blueprintsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder addAllBlueprints( java.lang.Iterable<? extends com.google.cloud.telcoautomation.v1.Blueprint> values) { if (blueprintsBuilder_ == null) { ensureBlueprintsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, blueprints_); onChanged(); } else { blueprintsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder clearBlueprints() { if (blueprintsBuilder_ == null) { blueprints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { blueprintsBuilder_.clear(); } return this; } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public Builder removeBlueprints(int index) { if (blueprintsBuilder_ == null) { ensureBlueprintsIsMutable(); blueprints_.remove(index); onChanged(); } else { blueprintsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of requested blueprint revisions. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public com.google.cloud.telcoautomation.v1.Blueprint.Builder getBlueprintsBuilder(int index) { return getBlueprintsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public com.google.cloud.telcoautomation.v1.BlueprintOrBuilder getBlueprintsOrBuilder( int index) { if (blueprintsBuilder_ == null) { return blueprints_.get(index); } else { return blueprintsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public java.util.List<? extends com.google.cloud.telcoautomation.v1.BlueprintOrBuilder> getBlueprintsOrBuilderList() { if (blueprintsBuilder_ != null) { return blueprintsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(blueprints_); } } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public com.google.cloud.telcoautomation.v1.Blueprint.Builder addBlueprintsBuilder() { return getBlueprintsFieldBuilder() .addBuilder(com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()); } /** * * * <pre> * The list of requested blueprint revisions. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public com.google.cloud.telcoautomation.v1.Blueprint.Builder addBlueprintsBuilder(int index) { return getBlueprintsFieldBuilder() .addBuilder(index, com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance()); } /** * * * <pre> * The list of requested blueprint revisions. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code> */ public java.util.List<com.google.cloud.telcoautomation.v1.Blueprint.Builder> getBlueprintsBuilderList() { return getBlueprintsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.telcoautomation.v1.Blueprint, com.google.cloud.telcoautomation.v1.Blueprint.Builder, com.google.cloud.telcoautomation.v1.BlueprintOrBuilder> getBlueprintsFieldBuilder() { if (blueprintsBuilder_ == null) { blueprintsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.telcoautomation.v1.Blueprint, com.google.cloud.telcoautomation.v1.Blueprint.Builder, com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>( blueprints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); blueprints_ = null; } return blueprintsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse) private static final com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse(); } public static com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchBlueprintRevisionsResponse> PARSER = new com.google.protobuf.AbstractParser<SearchBlueprintRevisionsResponse>() { @java.lang.Override public SearchBlueprintRevisionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchBlueprintRevisionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchBlueprintRevisionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.telcoautomation.v1.SearchBlueprintRevisionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,916
java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListEntitlementChangesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/channel/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.channel.v1; /** * * * <pre> * Response message for * [CloudChannelService.ListEntitlementChanges][google.cloud.channel.v1.CloudChannelService.ListEntitlementChanges] * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListEntitlementChangesResponse} */ public final class ListEntitlementChangesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListEntitlementChangesResponse) ListEntitlementChangesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListEntitlementChangesResponse.newBuilder() to construct. 
private ListEntitlementChangesResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEntitlementChangesResponse() { entitlementChanges_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEntitlementChangesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementChangesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementChangesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListEntitlementChangesResponse.class, com.google.cloud.channel.v1.ListEntitlementChangesResponse.Builder.class); } public static final int ENTITLEMENT_CHANGES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.channel.v1.EntitlementChange> entitlementChanges_; /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.channel.v1.EntitlementChange> getEntitlementChangesList() { return entitlementChanges_; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.channel.v1.EntitlementChangeOrBuilder> getEntitlementChangesOrBuilderList() { return entitlementChanges_; } /** * * * <pre> * The list of entitlement changes. 
* </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ @java.lang.Override public int getEntitlementChangesCount() { return entitlementChanges_.size(); } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.EntitlementChange getEntitlementChanges(int index) { return entitlementChanges_.get(index); } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ @java.lang.Override public com.google.cloud.channel.v1.EntitlementChangeOrBuilder getEntitlementChangesOrBuilder( int index) { return entitlementChanges_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to list the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to list the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < entitlementChanges_.size(); i++) { output.writeMessage(1, entitlementChanges_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < entitlementChanges_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, entitlementChanges_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.channel.v1.ListEntitlementChangesResponse)) { return super.equals(obj); } com.google.cloud.channel.v1.ListEntitlementChangesResponse other = (com.google.cloud.channel.v1.ListEntitlementChangesResponse) obj; if (!getEntitlementChangesList().equals(other.getEntitlementChangesList())) return false; 
if (!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEntitlementChangesCount() > 0) { hash = (37 * hash) + ENTITLEMENT_CHANGES_FIELD_NUMBER; hash = (53 * hash) + getEntitlementChangesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.channel.v1.ListEntitlementChangesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [CloudChannelService.ListEntitlementChanges][google.cloud.channel.v1.CloudChannelService.ListEntitlementChanges] * </pre> * * Protobuf type {@code google.cloud.channel.v1.ListEntitlementChangesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListEntitlementChangesResponse) com.google.cloud.channel.v1.ListEntitlementChangesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementChangesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementChangesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.channel.v1.ListEntitlementChangesResponse.class, com.google.cloud.channel.v1.ListEntitlementChangesResponse.Builder.class); } // Construct using com.google.cloud.channel.v1.ListEntitlementChangesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (entitlementChangesBuilder_ == null) { entitlementChanges_ = 
java.util.Collections.emptyList(); } else { entitlementChanges_ = null; entitlementChangesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.channel.v1.ServiceProto .internal_static_google_cloud_channel_v1_ListEntitlementChangesResponse_descriptor; } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementChangesResponse getDefaultInstanceForType() { return com.google.cloud.channel.v1.ListEntitlementChangesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementChangesResponse build() { com.google.cloud.channel.v1.ListEntitlementChangesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementChangesResponse buildPartial() { com.google.cloud.channel.v1.ListEntitlementChangesResponse result = new com.google.cloud.channel.v1.ListEntitlementChangesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.channel.v1.ListEntitlementChangesResponse result) { if (entitlementChangesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { entitlementChanges_ = java.util.Collections.unmodifiableList(entitlementChanges_); bitField0_ = (bitField0_ & ~0x00000001); } result.entitlementChanges_ = entitlementChanges_; } else { result.entitlementChanges_ = entitlementChangesBuilder_.build(); } } private void buildPartial0(com.google.cloud.channel.v1.ListEntitlementChangesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } 
@java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.channel.v1.ListEntitlementChangesResponse) { return mergeFrom((com.google.cloud.channel.v1.ListEntitlementChangesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.channel.v1.ListEntitlementChangesResponse other) { if (other == com.google.cloud.channel.v1.ListEntitlementChangesResponse.getDefaultInstance()) return this; if (entitlementChangesBuilder_ == null) { if (!other.entitlementChanges_.isEmpty()) { if (entitlementChanges_.isEmpty()) { entitlementChanges_ = other.entitlementChanges_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEntitlementChangesIsMutable(); entitlementChanges_.addAll(other.entitlementChanges_); } onChanged(); } } else { if (!other.entitlementChanges_.isEmpty()) { if (entitlementChangesBuilder_.isEmpty()) { entitlementChangesBuilder_.dispose(); entitlementChangesBuilder_ = null; entitlementChanges_ = other.entitlementChanges_; bitField0_ = (bitField0_ & ~0x00000001); entitlementChangesBuilder_ = 
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getEntitlementChangesFieldBuilder() : null; } else { entitlementChangesBuilder_.addAllMessages(other.entitlementChanges_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.channel.v1.EntitlementChange m = input.readMessage( com.google.cloud.channel.v1.EntitlementChange.parser(), extensionRegistry); if (entitlementChangesBuilder_ == null) { ensureEntitlementChangesIsMutable(); entitlementChanges_.add(m); } else { entitlementChangesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.channel.v1.EntitlementChange> entitlementChanges_ = java.util.Collections.emptyList(); private void ensureEntitlementChangesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { entitlementChanges_ = new java.util.ArrayList<com.google.cloud.channel.v1.EntitlementChange>( entitlementChanges_); bitField0_ |= 0x00000001; } } private 
com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.EntitlementChange, com.google.cloud.channel.v1.EntitlementChange.Builder, com.google.cloud.channel.v1.EntitlementChangeOrBuilder> entitlementChangesBuilder_; /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public java.util.List<com.google.cloud.channel.v1.EntitlementChange> getEntitlementChangesList() { if (entitlementChangesBuilder_ == null) { return java.util.Collections.unmodifiableList(entitlementChanges_); } else { return entitlementChangesBuilder_.getMessageList(); } } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public int getEntitlementChangesCount() { if (entitlementChangesBuilder_ == null) { return entitlementChanges_.size(); } else { return entitlementChangesBuilder_.getCount(); } } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public com.google.cloud.channel.v1.EntitlementChange getEntitlementChanges(int index) { if (entitlementChangesBuilder_ == null) { return entitlementChanges_.get(index); } else { return entitlementChangesBuilder_.getMessage(index); } } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder setEntitlementChanges( int index, com.google.cloud.channel.v1.EntitlementChange value) { if (entitlementChangesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntitlementChangesIsMutable(); entitlementChanges_.set(index, value); onChanged(); } else { entitlementChangesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of entitlement changes. 
* </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder setEntitlementChanges( int index, com.google.cloud.channel.v1.EntitlementChange.Builder builderForValue) { if (entitlementChangesBuilder_ == null) { ensureEntitlementChangesIsMutable(); entitlementChanges_.set(index, builderForValue.build()); onChanged(); } else { entitlementChangesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder addEntitlementChanges(com.google.cloud.channel.v1.EntitlementChange value) { if (entitlementChangesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntitlementChangesIsMutable(); entitlementChanges_.add(value); onChanged(); } else { entitlementChangesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder addEntitlementChanges( int index, com.google.cloud.channel.v1.EntitlementChange value) { if (entitlementChangesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEntitlementChangesIsMutable(); entitlementChanges_.add(index, value); onChanged(); } else { entitlementChangesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of entitlement changes. 
* </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder addEntitlementChanges( com.google.cloud.channel.v1.EntitlementChange.Builder builderForValue) { if (entitlementChangesBuilder_ == null) { ensureEntitlementChangesIsMutable(); entitlementChanges_.add(builderForValue.build()); onChanged(); } else { entitlementChangesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder addEntitlementChanges( int index, com.google.cloud.channel.v1.EntitlementChange.Builder builderForValue) { if (entitlementChangesBuilder_ == null) { ensureEntitlementChangesIsMutable(); entitlementChanges_.add(index, builderForValue.build()); onChanged(); } else { entitlementChangesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder addAllEntitlementChanges( java.lang.Iterable<? extends com.google.cloud.channel.v1.EntitlementChange> values) { if (entitlementChangesBuilder_ == null) { ensureEntitlementChangesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entitlementChanges_); onChanged(); } else { entitlementChangesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder clearEntitlementChanges() { if (entitlementChangesBuilder_ == null) { entitlementChanges_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { entitlementChangesBuilder_.clear(); } return this; } /** * * * <pre> * The list of entitlement changes. 
* </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public Builder removeEntitlementChanges(int index) { if (entitlementChangesBuilder_ == null) { ensureEntitlementChangesIsMutable(); entitlementChanges_.remove(index); onChanged(); } else { entitlementChangesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public com.google.cloud.channel.v1.EntitlementChange.Builder getEntitlementChangesBuilder( int index) { return getEntitlementChangesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public com.google.cloud.channel.v1.EntitlementChangeOrBuilder getEntitlementChangesOrBuilder( int index) { if (entitlementChangesBuilder_ == null) { return entitlementChanges_.get(index); } else { return entitlementChangesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public java.util.List<? extends com.google.cloud.channel.v1.EntitlementChangeOrBuilder> getEntitlementChangesOrBuilderList() { if (entitlementChangesBuilder_ != null) { return entitlementChangesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(entitlementChanges_); } } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public com.google.cloud.channel.v1.EntitlementChange.Builder addEntitlementChangesBuilder() { return getEntitlementChangesFieldBuilder() .addBuilder(com.google.cloud.channel.v1.EntitlementChange.getDefaultInstance()); } /** * * * <pre> * The list of entitlement changes. 
* </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public com.google.cloud.channel.v1.EntitlementChange.Builder addEntitlementChangesBuilder( int index) { return getEntitlementChangesFieldBuilder() .addBuilder(index, com.google.cloud.channel.v1.EntitlementChange.getDefaultInstance()); } /** * * * <pre> * The list of entitlement changes. * </pre> * * <code>repeated .google.cloud.channel.v1.EntitlementChange entitlement_changes = 1;</code> */ public java.util.List<com.google.cloud.channel.v1.EntitlementChange.Builder> getEntitlementChangesBuilderList() { return getEntitlementChangesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.EntitlementChange, com.google.cloud.channel.v1.EntitlementChange.Builder, com.google.cloud.channel.v1.EntitlementChangeOrBuilder> getEntitlementChangesFieldBuilder() { if (entitlementChangesBuilder_ == null) { entitlementChangesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.channel.v1.EntitlementChange, com.google.cloud.channel.v1.EntitlementChange.Builder, com.google.cloud.channel.v1.EntitlementChangeOrBuilder>( entitlementChanges_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); entitlementChanges_ = null; } return entitlementChangesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to list the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to list the next page of results. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to list the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to list the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to list the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListEntitlementChangesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListEntitlementChangesResponse) private static final com.google.cloud.channel.v1.ListEntitlementChangesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListEntitlementChangesResponse(); } public static com.google.cloud.channel.v1.ListEntitlementChangesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEntitlementChangesResponse> PARSER = new com.google.protobuf.AbstractParser<ListEntitlementChangesResponse>() { @java.lang.Override public ListEntitlementChangesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } 
return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListEntitlementChangesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEntitlementChangesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.channel.v1.ListEntitlementChangesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,149
java-maps-places/proto-google-maps-places-v1/src/main/java/com/google/maps/places/v1/PlaceProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/maps/places/v1/place.proto // Protobuf Java Version: 3.25.8 package com.google.maps.places.v1; public final class PlaceProto { private PlaceProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_AddressComponent_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_AddressComponent_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_PlusCode_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_PlusCode_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_OpeningHours_descriptor; static 
final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_OpeningHours_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_OpeningHours_Period_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_OpeningHours_Period_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_OpeningHours_Period_Point_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_OpeningHours_Period_Point_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_OpeningHours_SpecialDay_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_OpeningHours_SpecialDay_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_Attribution_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_Attribution_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_PaymentOptions_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_PaymentOptions_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_ParkingOptions_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_ParkingOptions_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_SubDestination_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_SubDestination_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_AccessibilityOptions_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_AccessibilityOptions_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_GenerativeSummary_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_GenerativeSummary_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_ContainingPlace_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_ContainingPlace_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_ReviewSummary_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_ReviewSummary_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_EvChargeAmenitySummary_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_EvChargeAmenitySummary_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_maps_places_v1_Place_NeighborhoodSummary_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_maps_places_v1_Place_NeighborhoodSummary_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static 
com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + "!google/maps/places/v1/place.proto\022\025goo" + "gle.maps.places.v1\032\031google/api/resource." + "proto\032\036google/geo/type/viewport.proto\032.g" + "oogle/maps/places/v1/address_descriptor.proto\032)google/maps/places/v1/content_blo" + "ck.proto\032\'google/maps/places/v1/ev_charg" + "ing.proto\032(google/maps/places/v1/fuel_options.proto\032!google/maps/places/v1/photo" + ".proto\032\'google/maps/places/v1/price_rang" + "e.proto\032\"google/maps/places/v1/review.pr" + "oto\032\037google/protobuf/timestamp.proto\032\026go" + "ogle/type/date.proto\032\032google/type/datetime.proto\032\030google/type/latlng.proto\032" + " google/type/localized_text.proto\032 google/type/postal_address.proto\"\3536\n" + "\005Place\022\014\n" + "\004name\030\001 \001(\t\022\n\n" + "\002id\030\002 \001(\t\0220\n" + "\014display_name\030\037 \001(\0132\032.google.type.LocalizedText\022\r\n" + "\005types\030\005 \003(\t\022\024\n" + "\014primary_type\0302 \001(\t\022=\n" + "\031primary_type_display_name\030 \001(\0132\032.google.type.LocalizedText\022\035\n" + "\025national_phone_number\030\007 \001(\t\022\"\n" + "\032international_phone_number\030\010 \001(\t\022\031\n" + "\021formatted_address\030\t \001(\t\022\037\n" + "\027short_formatted_address\0303 \001(\t\0222\n" + "\016postal_address\030Z \001(\0132\032.google.type.PostalAddress\022I\n" + "\022address_components\030\n" + " \003(\0132-.google.maps.places.v1.Place.AddressComponent\0228\n" + "\tplus_code\030\013 \001(\0132%.google.maps.places.v1.Place.PlusCode\022%\n" + "\010location\030\014 \001(\0132\023.google.type.LatLng\022+\n" + "\010viewport\030\r" + " \001(\0132\031.google.geo.type.Viewport\022\016\n" + "\006rating\030\016 \001(\001\022\027\n" + "\017google_maps_uri\030\017 \001(\t\022\023\n" + "\013website_uri\030\020 \001(\t\022.\n" + "\007reviews\0305 \003(\0132\035.google.maps.places.v1.Review\022H\n" + 
"\025regular_opening_hours\030\025" + " \001(\0132).google.maps.places.v1.Place.OpeningHours\022\037\n" + "\022utc_offset_minutes\030\026 \001(\005H\000\210\001\001\022(\n" + "\ttime_zone\030X \001(\0132\025.google.type.TimeZone\022,\n" + "\006photos\0306 \003(\0132\034.google.maps.places.v1.Photo\022\032\n" + "\022adr_format_address\030\030 \001(\t\022D\n" + "\017business_status\030\031 \001" + "(\0162+.google.maps.places.v1.Place.BusinessStatus\0226\n" + "\013price_level\030\032 \001(\0162!.google.maps.places.v1.PriceLevel\022>\n" + "\014attributions\030\033 \003(\0132(.google.maps.places.v1.Place.Attribution\022\036\n" + "\021user_rating_count\030\034 \001(\005H\001\210\001\001\022\032\n" + "\022icon_mask_base_uri\030\035 \001(\t\022\035\n" + "\025icon_background_color\030\036 \001(\t\022\024\n" + "\007takeout\030! \001(\010H\002\210\001\001\022\025\n" + "\010delivery\030\" \001(\010H\003\210\001\001\022\024\n" + "\007dine_in\030# \001(\010H\004\210\001\001\022\034\n" + "\017curbside_pickup\030$ \001(\010H\005\210\001\001\022\027\n\n" + "reservable\030& \001(\010H\006\210\001\001\022\035\n" + "\020serves_breakfast\030\' \001(\010H\007\210\001\001\022\031\n" + "\014serves_lunch\030( \001(\010H\010\210\001\001\022\032\n\r" + "serves_dinner\030) \001(\010H\t\210\001\001\022\030\n" + "\013serves_beer\030* \001(\010H\n" + "\210\001\001\022\030\n" + "\013serves_wine\030+ \001(\010H\013\210\001\001\022\032\n\r" + "serves_brunch\030, \001(\010H\014\210\001\001\022#\n" + "\026serves_vegetarian_food\030- \001(\010H\r" + "\210\001\001\022H\n" + "\025current_opening_hours\030." 
+ " \001(\0132).google.maps.places.v1.Place.OpeningHours\022R\n" + "\037current_secondary_opening_hours\030/" + " \003(\0132).google.maps.places.v1.Place.OpeningHours\022R\n" + "\037regular_secondary_opening_hours\0301" + " \003(\0132).google.maps.places.v1.Place.OpeningHours\0225\n" + "\021editorial_summary\0304 \001(\0132\032.google.type.LocalizedText\022\034\n" + "\017outdoor_seating\0307 \001(\010H\016\210\001\001\022\027\n\n" + "live_music\0308 \001(\010H\017\210\001\001\022\036\n" + "\021menu_for_children\0309 \001(\010H\020\210\001\001\022\035\n" + "\020serves_cocktails\030: \001(\010H\021\210\001\001\022\033\n" + "\016serves_dessert\030; \001(\010H\022\210\001\001\022\032\n\r" + "serves_coffee\030< \001(\010H\023\210\001\001\022\036\n" + "\021good_for_children\030> \001(\010H\024\210\001\001\022\030\n" + "\013allows_dogs\030? \001(\010H\025\210\001\001\022\025\n" + "\010restroom\030@ \001(\010H\026\210\001\001\022\034\n" + "\017good_for_groups\030A \001(\010H\027\210\001\001\022%\n" + "\030good_for_watching_sports\030B \001(\010H\030\210\001\001\022D\n" + "\017payment_options\030C \001(\0132+.google.maps.places.v1.Place.PaymentOptions\022D\n" + "\017parking_options\030F \001(\0132+.google.maps.places.v1.Place.ParkingOptions\022E\n" + "\020sub_destinations\030G" + " \003(\0132+.google.maps.places.v1.Place.SubDestination\022U\n" + "\025accessibility_options\030H" + " \001(\01321.google.maps.places.v1.Place.AccessibilityOptionsH\031\210\001\001\0228\n" + "\014fuel_options\030N \001(\0132\".google.maps.places.v1.FuelOptions\022A\n" + "\021ev_charge_options\030O \001(\0132&.google.maps.places.v1.EVChargeOptions\022J\n" + "\022generative_summary\030P" + " \001(\0132..google.maps.places.v1.Place.GenerativeSummary\022G\n" + "\021containing_places\030R" + " \003(\0132,.google.maps.places.v1.Place.ContainingPlace\022\'\n" + "\032pure_service_area_business\030S \001(\010H\032\210\001\001\022D\n" + "\022address_descriptor\030T 
\001(\0132(.google.maps.places.v1.AddressDescriptor\0226\n" + "\013price_range\030V \001(\0132!.google.maps.places.v1.PriceRange\022B\n" + "\016review_summary\030W \001(\0132*.google.maps.places.v1.Place.ReviewSummary\022V\n" + "\031ev_charge_amenity_summary\030Y" + " \001(\01323.google.maps.places.v1.Place.EvChargeAmenitySummary\022N\n" + "\024neighborhood_summary\030[" + " \001(\01320.google.maps.places.v1.Place.NeighborhoodSummary\032_\n" + "\020AddressComponent\022\021\n" + "\tlong_text\030\001 \001(\t\022\022\n\n" + "short_text\030\002 \001(\t\022\r\n" + "\005types\030\003 \003(\t\022\025\n\r" + "language_code\030\004 \001(\t\0326\n" + "\010PlusCode\022\023\n" + "\013global_code\030\001 \001(\t\022\025\n\r" + "compound_code\030\002 \001(\t\032\375\007\n" + "\014OpeningHours\022\025\n" + "\010open_now\030\001 \001(\010H\000\210\001\001\022A\n" + "\007periods\030\002 \003(\01320.google." + "maps.places.v1.Place.OpeningHours.Period\022\034\n" + "\024weekday_descriptions\030\003 \003(\t\022Z\n" + "\024secondary_hours_type\030\004 \001(\0162<.google.maps.place" + "s.v1.Place.OpeningHours.SecondaryHoursType\022J\n" + "\014special_days\030\005 \003(\01324.google.maps.p" + "laces.v1.Place.OpeningHours.SpecialDay\0222\n" + "\016next_open_time\030\006 \001(\0132\032.google.protobuf.Timestamp\0223\n" + "\017next_close_time\030\007 \001(\0132\032.google.protobuf.Timestamp\032\251\002\n" + "\006Period\022D\n" + "\004open\030\001" + " \001(\01326.google.maps.places.v1.Place.OpeningHours.Period.Point\022E\n" + "\005close\030\002 \001(\0132" + "6.google.maps.places.v1.Place.OpeningHours.Period.Point\032\221\001\n" + "\005Point\022\020\n" + "\003day\030\001 \001(\005H\000\210\001\001\022\021\n" + "\004hour\030\002 \001(\005H\001\210\001\001\022\023\n" + "\006minute\030\003 \001(\005H\002\210\001\001\022\037\n" + "\004date\030\006 \001(\0132\021.google.type.Date\022\021\n" + "\ttruncated\030\005 \001(\010B\006\n" + "\004_dayB\007\n" + "\005_hourB\t\n" + "\007_minute\032-\n\n" + 
"SpecialDay\022\037\n" + "\004date\030\001 \001(\0132\021.google.type.Date\"\373\001\n" + "\022SecondaryHoursType\022$\n" + " SECONDARY_HOURS_TYPE_UNSPECIFIED\020\000\022\021\n\r" + "DRIVE_THROUGH\020\001\022\016\n\n" + "HAPPY_HOUR\020\002\022\014\n" + "\010DELIVERY\020\003\022\013\n" + "\007TAKEOUT\020\004\022\013\n" + "\007KITCHEN\020\005\022\r\n" + "\tBREAKFAST\020\006\022\t\n" + "\005LUNCH\020\007\022\n\n" + "\006DINNER\020\010\022\n\n" + "\006BRUNCH\020\t\022\n\n" + "\006PICKUP\020\n" + "\022\n\n" + "\006ACCESS\020\013\022\020\n" + "\014SENIOR_HOURS\020\014\022\030\n" + "\024ONLINE_SERVICE_HOURS\020\r" + "B\013\n" + "\t_open_now\0325\n" + "\013Attribution\022\020\n" + "\010provider\030\001 \001(\t\022\024\n" + "\014provider_uri\030\002 \001(\t\032\346\001\n" + "\016PaymentOptions\022!\n" + "\024accepts_credit_cards\030\001 \001(\010H\000\210\001\001\022 \n" + "\023accepts_debit_cards\030\002 \001(\010H\001\210\001\001\022\036\n" + "\021accepts_cash_only\030\003 \001(\010H\002\210\001\001\022\030\n" + "\013accepts_nfc\030\004 \001(\010H\003\210\001\001B\027\n" + "\025_accepts_credit_cardsB\026\n" + "\024_accepts_debit_cardsB\024\n" + "\022_accepts_cash_onlyB\016\n" + "\014_accepts_nfc\032\216\003\n" + "\016ParkingOptions\022\035\n" + "\020free_parking_lot\030\001 \001(\010H\000\210\001\001\022\035\n" + "\020paid_parking_lot\030\002 \001(\010H\001\210\001\001\022 \n" + "\023free_street_parking\030\003 \001(\010H\002\210\001\001\022 \n" + "\023paid_street_parking\030\004 \001(\010H\003\210\001\001\022\032\n\r" + "valet_parking\030\005 \001(\010H\004\210\001\001\022 \n" + "\023free_garage_parking\030\006 \001(\010H\005\210\001\001\022 \n" + "\023paid_garage_parking\030\007 \001(\010H\006\210\001\001B\023\n" + "\021_free_parking_lotB\023\n" + "\021_paid_parking_lotB\026\n" + "\024_free_street_parkingB\026\n" + "\024_paid_street_parkingB\020\n" + "\016_valet_parkingB\026\n" + "\024_free_garage_parkingB\026\n" + "\024_paid_garage_parking\032L\n" + 
"\016SubDestination\022.\n" + "\004name\030\001 \001(\tB \372A\035\n" + "\033places.googleapis.com/Place\022\n\n" + "\002id\030\002 \001(\t\032\322\002\n" + "\024AccessibilityOptions\022*\n" + "\035wheelchair_accessible_parking\030\001 \001(\010H\000\210\001\001\022+\n" + "\036wheelchair_accessible_entrance\030\002 \001(\010H\001\210\001\001\022+\n" + "\036wheelchair_accessible_restroom\030\003 \001(\010H\002\210\001\001\022*\n" + "\035wheelchair_accessible_seating\030\004 \001(\010H\003\210\001\001B \n" + "\036_wheelchair_accessible_parkingB!\n" + "\037_wheelchair_accessible_entranceB!\n" + "\037_wheelchair_accessible_restroomB \n" + "\036_wheelchair_accessible_seating\032\231\001\n" + "\021GenerativeSummary\022,\n" + "\010overview\030\001 \001(\0132\032.google.type.LocalizedText\022!\n" + "\031overview_flag_content_uri\030\004 \001(\t\0223\n" + "\017disclosure_text\030\006 \001(\0132\032.google.type.LocalizedText\032M\n" + "\017ContainingPlace\022.\n" + "\004name\030\001 \001(\tB \372A\035\n" + "\033places.googleapis.com/Place\022\n\n" + "\002id\030\002 \001(\t\032\210\001\n\r" + "ReviewSummary\022(\n" + "\004text\030\001 \001(\0132\032.google.type.LocalizedText\022\030\n" + "\020flag_content_uri\030\002 \001(\t\0223\n" + "\017disclosure_text\030\003 \001(\0132\032.google.type.LocalizedText\032\300\002\n" + "\026EvChargeAmenitySummary\0225\n" + "\010overview\030\001 \001(\0132#.google.maps.places.v1.ContentBlock\0223\n" + "\006coffee\030\002 \001(\0132#.google.maps.places.v1.ContentBlock\0227\n\n" + "restaurant\030\003 \001(\0132#.google.maps.places.v1.ContentBlock\0222\n" + "\005store\030\004 \001(\0132#.google.maps.places.v1.ContentBlock\022\030\n" + "\020flag_content_uri\030\005 \001(\t\0223\n" + "\017disclosure_text\030\006 \001(\0132\032.google.type.LocalizedText\032\325\001\n" + "\023NeighborhoodSummary\0225\n" + "\010overview\030\001 \001(\0132#.google.maps.places.v1.ContentBlock\0228\n" + "\013description\030\002 
\001(\0132#.google.maps.places.v1.ContentBlock\022\030\n" + "\020flag_content_uri\030\003 \001(\t\0223\n" + "\017disclosure_text\030\004 \001(\0132\032.google.type.LocalizedText\"r\n" + "\016BusinessStatus\022\037\n" + "\033BUSINESS_STATUS_UNSPECIFIED\020\000\022\017\n" + "\013OPERATIONAL\020\001\022\026\n" + "\022CLOSED_TEMPORARILY\020\002\022\026\n" + "\022CLOSED_PERMANENTLY\020\003:B\352A?\n" + "\033places.googleapis.com/Place\022\021places/{place_id}*\006places2\005placeB\025\n" + "\023_utc_offset_minutesB\024\n" + "\022_user_rating_countB\n\n" + "\010_takeoutB\013\n" + "\t_deliveryB\n\n" + "\010_dine_inB\022\n" + "\020_curbside_pickupB\r\n" + "\013_reservableB\023\n" + "\021_serves_breakfastB\017\n\r" + "_serves_lunchB\020\n" + "\016_serves_dinnerB\016\n" + "\014_serves_beerB\016\n" + "\014_serves_wineB\020\n" + "\016_serves_brunchB\031\n" + "\027_serves_vegetarian_foodB\022\n" + "\020_outdoor_seatingB\r\n" + "\013_live_musicB\024\n" + "\022_menu_for_childrenB\023\n" + "\021_serves_cocktailsB\021\n" + "\017_serves_dessertB\020\n" + "\016_serves_coffeeB\024\n" + "\022_good_for_childrenB\016\n" + "\014_allows_dogsB\013\n" + "\t_restroomB\022\n" + "\020_good_for_groupsB\033\n" + "\031_good_for_watching_sportsB\030\n" + "\026_accessibility_optionsB\035\n" + "\033_pure_service_area_business*\261\001\n\n" + "PriceLevel\022\033\n" + "\027PRICE_LEVEL_UNSPECIFIED\020\000\022\024\n" + "\020PRICE_LEVEL_FREE\020\001\022\033\n" + "\027PRICE_LEVEL_INEXPENSIVE\020\002\022\030\n" + "\024PRICE_LEVEL_MODERATE\020\003\022\031\n" + "\025PRICE_LEVEL_EXPENSIVE\020\004\022\036\n" + "\032PRICE_LEVEL_VERY_EXPENSIVE\020\005B\233\001\n" + "\031com.google.maps.places.v1B\n" + "PlaceProtoP\001Z7cloud.google.com/go/maps/places/apiv1/placespb;pla" + "cespb\242\002\006GMPSV1\252\002\025Google.Maps.Places.V1\312\002" + "\025Google\\Maps\\Places\\V1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new 
com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.ResourceProto.getDescriptor(), com.google.geo.type.ViewportProto.getDescriptor(), com.google.maps.places.v1.AddressDescriptorProto.getDescriptor(), com.google.maps.places.v1.ContentBlockProto.getDescriptor(), com.google.maps.places.v1.EvChargingProto.getDescriptor(), com.google.maps.places.v1.FuelOptionsProto.getDescriptor(), com.google.maps.places.v1.PhotoProto.getDescriptor(), com.google.maps.places.v1.PriceRangeProto.getDescriptor(), com.google.maps.places.v1.ReviewProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.type.DateProto.getDescriptor(), com.google.type.DateTimeProto.getDescriptor(), com.google.type.LatLngProto.getDescriptor(), com.google.type.LocalizedTextProto.getDescriptor(), com.google.type.PostalAddressProto.getDescriptor(), }); internal_static_google_maps_places_v1_Place_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_maps_places_v1_Place_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_descriptor, new java.lang.String[] { "Name", "Id", "DisplayName", "Types", "PrimaryType", "PrimaryTypeDisplayName", "NationalPhoneNumber", "InternationalPhoneNumber", "FormattedAddress", "ShortFormattedAddress", "PostalAddress", "AddressComponents", "PlusCode", "Location", "Viewport", "Rating", "GoogleMapsUri", "WebsiteUri", "Reviews", "RegularOpeningHours", "UtcOffsetMinutes", "TimeZone", "Photos", "AdrFormatAddress", "BusinessStatus", "PriceLevel", "Attributions", "UserRatingCount", "IconMaskBaseUri", "IconBackgroundColor", "Takeout", "Delivery", "DineIn", "CurbsidePickup", "Reservable", "ServesBreakfast", "ServesLunch", "ServesDinner", "ServesBeer", "ServesWine", "ServesBrunch", "ServesVegetarianFood", "CurrentOpeningHours", "CurrentSecondaryOpeningHours", "RegularSecondaryOpeningHours", "EditorialSummary", "OutdoorSeating", "LiveMusic", "MenuForChildren", 
"ServesCocktails", "ServesDessert", "ServesCoffee", "GoodForChildren", "AllowsDogs", "Restroom", "GoodForGroups", "GoodForWatchingSports", "PaymentOptions", "ParkingOptions", "SubDestinations", "AccessibilityOptions", "FuelOptions", "EvChargeOptions", "GenerativeSummary", "ContainingPlaces", "PureServiceAreaBusiness", "AddressDescriptor", "PriceRange", "ReviewSummary", "EvChargeAmenitySummary", "NeighborhoodSummary", }); internal_static_google_maps_places_v1_Place_AddressComponent_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(0); internal_static_google_maps_places_v1_Place_AddressComponent_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_AddressComponent_descriptor, new java.lang.String[] { "LongText", "ShortText", "Types", "LanguageCode", }); internal_static_google_maps_places_v1_Place_PlusCode_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(1); internal_static_google_maps_places_v1_Place_PlusCode_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_PlusCode_descriptor, new java.lang.String[] { "GlobalCode", "CompoundCode", }); internal_static_google_maps_places_v1_Place_OpeningHours_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(2); internal_static_google_maps_places_v1_Place_OpeningHours_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_OpeningHours_descriptor, new java.lang.String[] { "OpenNow", "Periods", "WeekdayDescriptions", "SecondaryHoursType", "SpecialDays", "NextOpenTime", "NextCloseTime", }); internal_static_google_maps_places_v1_Place_OpeningHours_Period_descriptor = internal_static_google_maps_places_v1_Place_OpeningHours_descriptor.getNestedTypes().get(0); 
internal_static_google_maps_places_v1_Place_OpeningHours_Period_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_OpeningHours_Period_descriptor, new java.lang.String[] { "Open", "Close", }); internal_static_google_maps_places_v1_Place_OpeningHours_Period_Point_descriptor = internal_static_google_maps_places_v1_Place_OpeningHours_Period_descriptor .getNestedTypes() .get(0); internal_static_google_maps_places_v1_Place_OpeningHours_Period_Point_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_OpeningHours_Period_Point_descriptor, new java.lang.String[] { "Day", "Hour", "Minute", "Date", "Truncated", }); internal_static_google_maps_places_v1_Place_OpeningHours_SpecialDay_descriptor = internal_static_google_maps_places_v1_Place_OpeningHours_descriptor.getNestedTypes().get(1); internal_static_google_maps_places_v1_Place_OpeningHours_SpecialDay_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_OpeningHours_SpecialDay_descriptor, new java.lang.String[] { "Date", }); internal_static_google_maps_places_v1_Place_Attribution_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(3); internal_static_google_maps_places_v1_Place_Attribution_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_Attribution_descriptor, new java.lang.String[] { "Provider", "ProviderUri", }); internal_static_google_maps_places_v1_Place_PaymentOptions_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(4); internal_static_google_maps_places_v1_Place_PaymentOptions_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_PaymentOptions_descriptor, new 
java.lang.String[] { "AcceptsCreditCards", "AcceptsDebitCards", "AcceptsCashOnly", "AcceptsNfc", }); internal_static_google_maps_places_v1_Place_ParkingOptions_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(5); internal_static_google_maps_places_v1_Place_ParkingOptions_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_ParkingOptions_descriptor, new java.lang.String[] { "FreeParkingLot", "PaidParkingLot", "FreeStreetParking", "PaidStreetParking", "ValetParking", "FreeGarageParking", "PaidGarageParking", }); internal_static_google_maps_places_v1_Place_SubDestination_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(6); internal_static_google_maps_places_v1_Place_SubDestination_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_SubDestination_descriptor, new java.lang.String[] { "Name", "Id", }); internal_static_google_maps_places_v1_Place_AccessibilityOptions_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(7); internal_static_google_maps_places_v1_Place_AccessibilityOptions_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_AccessibilityOptions_descriptor, new java.lang.String[] { "WheelchairAccessibleParking", "WheelchairAccessibleEntrance", "WheelchairAccessibleRestroom", "WheelchairAccessibleSeating", }); internal_static_google_maps_places_v1_Place_GenerativeSummary_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(8); internal_static_google_maps_places_v1_Place_GenerativeSummary_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_GenerativeSummary_descriptor, new java.lang.String[] { "Overview", 
"OverviewFlagContentUri", "DisclosureText", }); internal_static_google_maps_places_v1_Place_ContainingPlace_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(9); internal_static_google_maps_places_v1_Place_ContainingPlace_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_ContainingPlace_descriptor, new java.lang.String[] { "Name", "Id", }); internal_static_google_maps_places_v1_Place_ReviewSummary_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(10); internal_static_google_maps_places_v1_Place_ReviewSummary_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_ReviewSummary_descriptor, new java.lang.String[] { "Text", "FlagContentUri", "DisclosureText", }); internal_static_google_maps_places_v1_Place_EvChargeAmenitySummary_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(11); internal_static_google_maps_places_v1_Place_EvChargeAmenitySummary_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_EvChargeAmenitySummary_descriptor, new java.lang.String[] { "Overview", "Coffee", "Restaurant", "Store", "FlagContentUri", "DisclosureText", }); internal_static_google_maps_places_v1_Place_NeighborhoodSummary_descriptor = internal_static_google_maps_places_v1_Place_descriptor.getNestedTypes().get(12); internal_static_google_maps_places_v1_Place_NeighborhoodSummary_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_maps_places_v1_Place_NeighborhoodSummary_descriptor, new java.lang.String[] { "Overview", "Description", "FlagContentUri", "DisclosureText", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); 
registry.add(com.google.api.ResourceProto.resource); registry.add(com.google.api.ResourceProto.resourceReference); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.ResourceProto.getDescriptor(); com.google.geo.type.ViewportProto.getDescriptor(); com.google.maps.places.v1.AddressDescriptorProto.getDescriptor(); com.google.maps.places.v1.ContentBlockProto.getDescriptor(); com.google.maps.places.v1.EvChargingProto.getDescriptor(); com.google.maps.places.v1.FuelOptionsProto.getDescriptor(); com.google.maps.places.v1.PhotoProto.getDescriptor(); com.google.maps.places.v1.PriceRangeProto.getDescriptor(); com.google.maps.places.v1.ReviewProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.type.DateProto.getDescriptor(); com.google.type.DateTimeProto.getDescriptor(); com.google.type.LatLngProto.getDescriptor(); com.google.type.LocalizedTextProto.getDescriptor(); com.google.type.PostalAddressProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
googleapis/google-cloud-java
38,185
java-speech/google-cloud-speech/src/main/java/com/google/cloud/speech/v1/stub/HttpJsonAdaptationStub.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.speech.v1.stub;

import static com.google.cloud.speech.v1.AdaptationClient.ListCustomClassesPagedResponse;
import static com.google.cloud.speech.v1.AdaptationClient.ListPhraseSetPagedResponse;

import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.speech.v1.CreateCustomClassRequest;
import com.google.cloud.speech.v1.CreatePhraseSetRequest;
import com.google.cloud.speech.v1.CustomClass;
import com.google.cloud.speech.v1.DeleteCustomClassRequest;
import com.google.cloud.speech.v1.DeletePhraseSetRequest;
import com.google.cloud.speech.v1.GetCustomClassRequest;
import com.google.cloud.speech.v1.GetPhraseSetRequest;
import com.google.cloud.speech.v1.ListCustomClassesRequest;
import com.google.cloud.speech.v1.ListCustomClassesResponse;
import com.google.cloud.speech.v1.ListPhraseSetRequest;
import com.google.cloud.speech.v1.ListPhraseSetResponse;
import com.google.cloud.speech.v1.PhraseSet;
import com.google.cloud.speech.v1.UpdateCustomClassRequest;
import com.google.cloud.speech.v1.UpdatePhraseSetRequest;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * REST stub implementation for the Adaptation service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>Each RPC of the Adaptation service is modeled as a static {@link ApiMethodDescriptor} that
 * maps a request proto onto an HTTP/JSON request (path template, query params, request body), and
 * an instance-level {@link UnaryCallable} built from that descriptor in the constructor.
 *
 * <p>NOTE(review): this file is produced by gapic-generator-java; do not hand-edit the generated
 * logic — regenerate it instead.
 */
@Generated("by gapic-generator-java")
public class HttpJsonAdaptationStub extends AdaptationStub {
  // Empty registry: no RPC here returns google.protobuf.Any payloads that need type resolution.
  private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build();

  // ---------------------------------------------------------------------------
  // Static REST method descriptors (one per RPC). Each descriptor is immutable
  // and shared by every stub instance. The "$alt" query param forces JSON
  // responses with enums encoded as integers.
  // ---------------------------------------------------------------------------

  // CreatePhraseSet: POST /v1/{parent}/phraseSets with the request (minus the
  // path-bound "parent" field) serialized as the JSON body ("*" body mapping).
  private static final ApiMethodDescriptor<CreatePhraseSetRequest, PhraseSet>
      createPhraseSetMethodDescriptor =
          ApiMethodDescriptor.<CreatePhraseSetRequest, PhraseSet>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/CreatePhraseSet")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreatePhraseSetRequest>newBuilder()
                      .setPath(
                          "/v1/{parent=projects/*/locations/*}/phraseSets",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreatePhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreatePhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  // "parent" already travels in the URL path, so it is
                                  // cleared from the body to avoid duplication.
                                  .toBody("*", request.toBuilder().clearParent().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<PhraseSet>newBuilder()
                      .setDefaultInstance(PhraseSet.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // GetPhraseSet: GET by resource name; no request body.
  private static final ApiMethodDescriptor<GetPhraseSetRequest, PhraseSet>
      getPhraseSetMethodDescriptor =
          ApiMethodDescriptor.<GetPhraseSetRequest, PhraseSet>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/GetPhraseSet")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetPhraseSetRequest>newBuilder()
                      .setPath(
                          "/v1/{name=projects/*/locations/*/phraseSets/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetPhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetPhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<PhraseSet>newBuilder()
                      .setDefaultInstance(PhraseSet.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // ListPhraseSet: GET under the parent collection; pagination fields
  // (pageSize/pageToken) are carried as query parameters.
  private static final ApiMethodDescriptor<ListPhraseSetRequest, ListPhraseSetResponse>
      listPhraseSetMethodDescriptor =
          ApiMethodDescriptor.<ListPhraseSetRequest, ListPhraseSetResponse>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/ListPhraseSet")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListPhraseSetRequest>newBuilder()
                      .setPath(
                          "/v1/{parent=projects/*/locations/*}/phraseSets",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListPhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListPhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListPhraseSetResponse>newBuilder()
                      .setDefaultInstance(ListPhraseSetResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // UpdatePhraseSet: PATCH keyed on the nested resource name
  // (phraseSet.name); updateMask goes in the query string and only the
  // PhraseSet message itself is sent as the body.
  private static final ApiMethodDescriptor<UpdatePhraseSetRequest, PhraseSet>
      updatePhraseSetMethodDescriptor =
          ApiMethodDescriptor.<UpdatePhraseSetRequest, PhraseSet>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/UpdatePhraseSet")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdatePhraseSetRequest>newBuilder()
                      .setPath(
                          "/v1/{phraseSet.name=projects/*/locations/*/phraseSets/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdatePhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "phraseSet.name", request.getPhraseSet().getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdatePhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("phraseSet", request.getPhraseSet(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<PhraseSet>newBuilder()
                      .setDefaultInstance(PhraseSet.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // DeletePhraseSet: DELETE by resource name; response is google.protobuf.Empty.
  private static final ApiMethodDescriptor<DeletePhraseSetRequest, Empty>
      deletePhraseSetMethodDescriptor =
          ApiMethodDescriptor.<DeletePhraseSetRequest, Empty>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/DeletePhraseSet")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeletePhraseSetRequest>newBuilder()
                      .setPath(
                          "/v1/{name=projects/*/locations/*/phraseSets/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeletePhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeletePhraseSetRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Empty>newBuilder()
                      .setDefaultInstance(Empty.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // CreateCustomClass: mirrors CreatePhraseSet for the customClasses collection.
  private static final ApiMethodDescriptor<CreateCustomClassRequest, CustomClass>
      createCustomClassMethodDescriptor =
          ApiMethodDescriptor.<CreateCustomClassRequest, CustomClass>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/CreateCustomClass")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateCustomClassRequest>newBuilder()
                      .setPath(
                          "/v1/{parent=projects/*/locations/*}/customClasses",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearParent().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<CustomClass>newBuilder()
                      .setDefaultInstance(CustomClass.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // GetCustomClass: GET by resource name; no request body.
  private static final ApiMethodDescriptor<GetCustomClassRequest, CustomClass>
      getCustomClassMethodDescriptor =
          ApiMethodDescriptor.<GetCustomClassRequest, CustomClass>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/GetCustomClass")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetCustomClassRequest>newBuilder()
                      .setPath(
                          "/v1/{name=projects/*/locations/*/customClasses/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<CustomClass>newBuilder()
                      .setDefaultInstance(CustomClass.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // ListCustomClasses: paginated GET under the parent collection.
  private static final ApiMethodDescriptor<ListCustomClassesRequest, ListCustomClassesResponse>
      listCustomClassesMethodDescriptor =
          ApiMethodDescriptor.<ListCustomClassesRequest, ListCustomClassesResponse>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/ListCustomClasses")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListCustomClassesRequest>newBuilder()
                      .setPath(
                          "/v1/{parent=projects/*/locations/*}/customClasses",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListCustomClassesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListCustomClassesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListCustomClassesResponse>newBuilder()
                      .setDefaultInstance(ListCustomClassesResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // UpdateCustomClass: PATCH keyed on customClass.name, mirroring UpdatePhraseSet.
  private static final ApiMethodDescriptor<UpdateCustomClassRequest, CustomClass>
      updateCustomClassMethodDescriptor =
          ApiMethodDescriptor.<UpdateCustomClassRequest, CustomClass>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/UpdateCustomClass")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateCustomClassRequest>newBuilder()
                      .setPath(
                          "/v1/{customClass.name=projects/*/locations/*/customClasses/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "customClass.name", request.getCustomClass().getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("customClass", request.getCustomClass(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<CustomClass>newBuilder()
                      .setDefaultInstance(CustomClass.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // DeleteCustomClass: DELETE by resource name; response is google.protobuf.Empty.
  private static final ApiMethodDescriptor<DeleteCustomClassRequest, Empty>
      deleteCustomClassMethodDescriptor =
          ApiMethodDescriptor.<DeleteCustomClassRequest, Empty>newBuilder()
              .setFullMethodName("google.cloud.speech.v1.Adaptation/DeleteCustomClass")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteCustomClassRequest>newBuilder()
                      .setPath(
                          "/v1/{name=projects/*/locations/*/customClasses/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteCustomClassRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Empty>newBuilder()
                      .setDefaultInstance(Empty.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();

  // ---------------------------------------------------------------------------
  // Per-instance callables, one per RPC (plus paged variants for the two list
  // RPCs), all wired up in the constructor from the descriptors above.
  // ---------------------------------------------------------------------------
  private final UnaryCallable<CreatePhraseSetRequest, PhraseSet> createPhraseSetCallable;
  private final UnaryCallable<GetPhraseSetRequest, PhraseSet> getPhraseSetCallable;
  private final UnaryCallable<ListPhraseSetRequest, ListPhraseSetResponse> listPhraseSetCallable;
  private final UnaryCallable<ListPhraseSetRequest, ListPhraseSetPagedResponse>
      listPhraseSetPagedCallable;
  private final UnaryCallable<UpdatePhraseSetRequest, PhraseSet> updatePhraseSetCallable;
  private final UnaryCallable<DeletePhraseSetRequest, Empty> deletePhraseSetCallable;
  private final UnaryCallable<CreateCustomClassRequest, CustomClass> createCustomClassCallable;
  private final UnaryCallable<GetCustomClassRequest, CustomClass> getCustomClassCallable;
  private final UnaryCallable<ListCustomClassesRequest, ListCustomClassesResponse>
      listCustomClassesCallable;
  private final UnaryCallable<ListCustomClassesRequest, ListCustomClassesPagedResponse>
      listCustomClassesPagedCallable;
  private final UnaryCallable<UpdateCustomClassRequest, CustomClass> updateCustomClassCallable;
  private final UnaryCallable<DeleteCustomClassRequest, Empty> deleteCustomClassCallable;

  // Aggregates the client context's background resources so the stub's
  // lifecycle methods (close/shutdown/awaitTermination) manage them as one.
  private final BackgroundResource backgroundResources;
  private final HttpJsonStubCallableFactory callableFactory;

  /** Creates a stub from the given settings, building a fresh {@link ClientContext}. */
  public static final HttpJsonAdaptationStub create(AdaptationStubSettings settings)
      throws IOException {
    return new HttpJsonAdaptationStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default HTTP/JSON settings bound to an existing client context. */
  public static final HttpJsonAdaptationStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonAdaptationStub(
        AdaptationStubSettings.newHttpJsonBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings and a caller-supplied callable factory. */
  public static final HttpJsonAdaptationStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    return new HttpJsonAdaptationStub(
        AdaptationStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of HttpJsonAdaptationStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected HttpJsonAdaptationStub(AdaptationStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new HttpJsonAdaptationCallableFactory());
  }

  /**
   * Constructs an instance of HttpJsonAdaptationStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   *
   * <p>For each RPC this builds transport settings (descriptor + type registry + routing-header
   * params extractor) and then asks the callable factory to combine them with the per-method
   * retry/timeout settings into the final callables.
   */
  protected HttpJsonAdaptationStub(
      AdaptationStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;

    // The params extractors below populate dynamic routing headers; note the
    // proto field names (e.g. "phrase_set.name") differ from the camelCase
    // JSON path variables used in the descriptors.
    HttpJsonCallSettings<CreatePhraseSetRequest, PhraseSet> createPhraseSetTransportSettings =
        HttpJsonCallSettings.<CreatePhraseSetRequest, PhraseSet>newBuilder()
            .setMethodDescriptor(createPhraseSetMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetPhraseSetRequest, PhraseSet> getPhraseSetTransportSettings =
        HttpJsonCallSettings.<GetPhraseSetRequest, PhraseSet>newBuilder()
            .setMethodDescriptor(getPhraseSetMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListPhraseSetRequest, ListPhraseSetResponse>
        listPhraseSetTransportSettings =
            HttpJsonCallSettings.<ListPhraseSetRequest, ListPhraseSetResponse>newBuilder()
                .setMethodDescriptor(listPhraseSetMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<UpdatePhraseSetRequest, PhraseSet> updatePhraseSetTransportSettings =
        HttpJsonCallSettings.<UpdatePhraseSetRequest, PhraseSet>newBuilder()
            .setMethodDescriptor(updatePhraseSetMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("phrase_set.name", String.valueOf(request.getPhraseSet().getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<DeletePhraseSetRequest, Empty> deletePhraseSetTransportSettings =
        HttpJsonCallSettings.<DeletePhraseSetRequest, Empty>newBuilder()
            .setMethodDescriptor(deletePhraseSetMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<CreateCustomClassRequest, CustomClass>
        createCustomClassTransportSettings =
            HttpJsonCallSettings.<CreateCustomClassRequest, CustomClass>newBuilder()
                .setMethodDescriptor(createCustomClassMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<GetCustomClassRequest, CustomClass> getCustomClassTransportSettings =
        HttpJsonCallSettings.<GetCustomClassRequest, CustomClass>newBuilder()
            .setMethodDescriptor(getCustomClassMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListCustomClassesRequest, ListCustomClassesResponse>
        listCustomClassesTransportSettings =
            HttpJsonCallSettings.<ListCustomClassesRequest, ListCustomClassesResponse>newBuilder()
                .setMethodDescriptor(listCustomClassesMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<UpdateCustomClassRequest, CustomClass>
        updateCustomClassTransportSettings =
            HttpJsonCallSettings.<UpdateCustomClassRequest, CustomClass>newBuilder()
                .setMethodDescriptor(updateCustomClassMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add(
                          "custom_class.name", String.valueOf(request.getCustomClass().getName()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<DeleteCustomClassRequest, Empty> deleteCustomClassTransportSettings =
        HttpJsonCallSettings.<DeleteCustomClassRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteCustomClassMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();

    // Combine transport settings with per-method call settings. The paged
    // callables reuse the same transport settings as their unary counterparts.
    this.createPhraseSetCallable =
        callableFactory.createUnaryCallable(
            createPhraseSetTransportSettings, settings.createPhraseSetSettings(), clientContext);
    this.getPhraseSetCallable =
        callableFactory.createUnaryCallable(
            getPhraseSetTransportSettings, settings.getPhraseSetSettings(), clientContext);
    this.listPhraseSetCallable =
        callableFactory.createUnaryCallable(
            listPhraseSetTransportSettings, settings.listPhraseSetSettings(), clientContext);
    this.listPhraseSetPagedCallable =
        callableFactory.createPagedCallable(
            listPhraseSetTransportSettings, settings.listPhraseSetSettings(), clientContext);
    this.updatePhraseSetCallable =
        callableFactory.createUnaryCallable(
            updatePhraseSetTransportSettings, settings.updatePhraseSetSettings(), clientContext);
    this.deletePhraseSetCallable =
        callableFactory.createUnaryCallable(
            deletePhraseSetTransportSettings, settings.deletePhraseSetSettings(), clientContext);
    this.createCustomClassCallable =
        callableFactory.createUnaryCallable(
            createCustomClassTransportSettings,
            settings.createCustomClassSettings(),
            clientContext);
    this.getCustomClassCallable =
        callableFactory.createUnaryCallable(
            getCustomClassTransportSettings, settings.getCustomClassSettings(), clientContext);
    this.listCustomClassesCallable =
        callableFactory.createUnaryCallable(
            listCustomClassesTransportSettings,
            settings.listCustomClassesSettings(),
            clientContext);
    this.listCustomClassesPagedCallable =
        callableFactory.createPagedCallable(
            listCustomClassesTransportSettings,
            settings.listCustomClassesSettings(),
            clientContext);
    this.updateCustomClassCallable =
        callableFactory.createUnaryCallable(
            updateCustomClassTransportSettings,
            settings.updateCustomClassSettings(),
            clientContext);
    this.deleteCustomClassCallable =
        callableFactory.createUnaryCallable(
            deleteCustomClassTransportSettings,
            settings.deleteCustomClassSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  /** Returns the full list of REST descriptors exposed by this stub (internal use only). */
  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(createPhraseSetMethodDescriptor);
    methodDescriptors.add(getPhraseSetMethodDescriptor);
    methodDescriptors.add(listPhraseSetMethodDescriptor);
    methodDescriptors.add(updatePhraseSetMethodDescriptor);
    methodDescriptors.add(deletePhraseSetMethodDescriptor);
    methodDescriptors.add(createCustomClassMethodDescriptor);
    methodDescriptors.add(getCustomClassMethodDescriptor);
    methodDescriptors.add(listCustomClassesMethodDescriptor);
    methodDescriptors.add(updateCustomClassMethodDescriptor);
    methodDescriptors.add(deleteCustomClassMethodDescriptor);
    return methodDescriptors;
  }

  // --- Callable accessors (simple getters for the constructor-built callables) ---

  @Override
  public UnaryCallable<CreatePhraseSetRequest, PhraseSet> createPhraseSetCallable() {
    return createPhraseSetCallable;
  }

  @Override
  public UnaryCallable<GetPhraseSetRequest, PhraseSet> getPhraseSetCallable() {
    return getPhraseSetCallable;
  }

  @Override
  public UnaryCallable<ListPhraseSetRequest, ListPhraseSetResponse> listPhraseSetCallable() {
    return listPhraseSetCallable;
  }

  @Override
  public UnaryCallable<ListPhraseSetRequest, ListPhraseSetPagedResponse>
      listPhraseSetPagedCallable() {
    return listPhraseSetPagedCallable;
  }

  @Override
  public UnaryCallable<UpdatePhraseSetRequest, PhraseSet> updatePhraseSetCallable() {
    return updatePhraseSetCallable;
  }

  @Override
  public UnaryCallable<DeletePhraseSetRequest, Empty> deletePhraseSetCallable() {
    return deletePhraseSetCallable;
  }

  @Override
  public UnaryCallable<CreateCustomClassRequest, CustomClass> createCustomClassCallable() {
    return createCustomClassCallable;
  }

  @Override
  public UnaryCallable<GetCustomClassRequest, CustomClass> getCustomClassCallable() {
    return getCustomClassCallable;
  }

  @Override
  public UnaryCallable<ListCustomClassesRequest, ListCustomClassesResponse>
      listCustomClassesCallable() {
    return listCustomClassesCallable;
  }

  @Override
  public UnaryCallable<ListCustomClassesRequest, ListCustomClassesPagedResponse>
      listCustomClassesPagedCallable() {
    return listCustomClassesPagedCallable;
  }

  @Override
  public UnaryCallable<UpdateCustomClassRequest, CustomClass> updateCustomClassCallable() {
    return updateCustomClassCallable;
  }

  @Override
  public UnaryCallable<DeleteCustomClassRequest, Empty> deleteCustomClassCallable() {
    return deleteCustomClassCallable;
  }

  // --- Lifecycle management, delegated to the aggregated background resources ---

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked exceptions unchanged.
      throw e;
    } catch (Exception e) {
      // Wrap checked exceptions so close() stays signature-compatible.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
googleapis/google-cloud-java
37,851
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/DatabaseResourceReference.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * Identifies a single database resource, like a table within a database. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.DatabaseResourceReference} */ public final class DatabaseResourceReference extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.DatabaseResourceReference) DatabaseResourceReferenceOrBuilder { private static final long serialVersionUID = 0L; // Use DatabaseResourceReference.newBuilder() to construct. 
private DatabaseResourceReference(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DatabaseResourceReference() { projectId_ = ""; instance_ = ""; database_ = ""; databaseResource_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DatabaseResourceReference(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DatabaseResourceReference_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DatabaseResourceReference_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.DatabaseResourceReference.class, com.google.privacy.dlp.v2.DatabaseResourceReference.Builder.class); } public static final int PROJECT_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object projectId_ = ""; /** * * * <pre> * Required. If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The projectId. */ @java.lang.Override public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * * * <pre> * Required. If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for projectId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object instance_ = ""; /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The instance. */ @java.lang.Override public java.lang.String getInstance() { java.lang.Object ref = instance_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instance_ = s; return s; } } /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for instance. */ @java.lang.Override public com.google.protobuf.ByteString getInstanceBytes() { java.lang.Object ref = instance_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instance_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATABASE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object database_ = ""; /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The database. 
*/ @java.lang.Override public java.lang.String getDatabase() { java.lang.Object ref = database_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); database_ = s; return s; } } /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for database. */ @java.lang.Override public com.google.protobuf.ByteString getDatabaseBytes() { java.lang.Object ref = database_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); database_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATABASE_RESOURCE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object databaseResource_ = ""; /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. * </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The databaseResource. */ @java.lang.Override public java.lang.String getDatabaseResource() { java.lang.Object ref = databaseResource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); databaseResource_ = s; return s; } } /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. * </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for databaseResource. 
*/ @java.lang.Override public com.google.protobuf.ByteString getDatabaseResourceBytes() { java.lang.Object ref = databaseResource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); databaseResource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instance_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instance_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(database_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, database_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseResource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, databaseResource_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instance_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instance_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(database_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, database_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseResource_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, databaseResource_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.DatabaseResourceReference)) { return super.equals(obj); } com.google.privacy.dlp.v2.DatabaseResourceReference other = (com.google.privacy.dlp.v2.DatabaseResourceReference) obj; if (!getProjectId().equals(other.getProjectId())) return false; if (!getInstance().equals(other.getInstance())) return false; if (!getDatabase().equals(other.getDatabase())) return false; if (!getDatabaseResource().equals(other.getDatabaseResource())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); hash = (37 * hash) + DATABASE_FIELD_NUMBER; hash = (53 * hash) + getDatabase().hashCode(); hash = (37 * hash) + DATABASE_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getDatabaseResource().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DatabaseResourceReference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2.DatabaseResourceReference prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Identifies a single database resource, like a table within a database. 
* </pre> * * Protobuf type {@code google.privacy.dlp.v2.DatabaseResourceReference} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.DatabaseResourceReference) com.google.privacy.dlp.v2.DatabaseResourceReferenceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DatabaseResourceReference_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DatabaseResourceReference_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.DatabaseResourceReference.class, com.google.privacy.dlp.v2.DatabaseResourceReference.Builder.class); } // Construct using com.google.privacy.dlp.v2.DatabaseResourceReference.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; projectId_ = ""; instance_ = ""; database_ = ""; databaseResource_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DatabaseResourceReference_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.DatabaseResourceReference getDefaultInstanceForType() { return com.google.privacy.dlp.v2.DatabaseResourceReference.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.DatabaseResourceReference build() { com.google.privacy.dlp.v2.DatabaseResourceReference result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return 
result; } @java.lang.Override public com.google.privacy.dlp.v2.DatabaseResourceReference buildPartial() { com.google.privacy.dlp.v2.DatabaseResourceReference result = new com.google.privacy.dlp.v2.DatabaseResourceReference(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.privacy.dlp.v2.DatabaseResourceReference result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.projectId_ = projectId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instance_ = instance_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.database_ = database_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.databaseResource_ = databaseResource_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.DatabaseResourceReference) { return mergeFrom((com.google.privacy.dlp.v2.DatabaseResourceReference) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.privacy.dlp.v2.DatabaseResourceReference other) { if (other == com.google.privacy.dlp.v2.DatabaseResourceReference.getDefaultInstance()) return this; if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getInstance().isEmpty()) { instance_ = other.instance_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getDatabase().isEmpty()) { database_ = other.database_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getDatabaseResource().isEmpty()) { databaseResource_ = other.databaseResource_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { projectId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { instance_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { database_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { databaseResource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object projectId_ = ""; /** * * * <pre> * Required. 
If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The projectId. */ public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for projectId. */ public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The projectId to set. * @return This builder for chaining. */ public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
If within a project-level config, then this must match the * config's project ID. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for projectId to set. * @return This builder for chaining. */ public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object instance_ = ""; /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The instance. */ public java.lang.String getInstance() { java.lang.Object ref = instance_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instance_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for instance. */ public com.google.protobuf.ByteString getInstanceBytes() { java.lang.Object ref = instance_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instance_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The instance to set. * @return This builder for chaining. 
*/ public Builder setInstance(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instance_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearInstance() { instance_ = getDefaultInstance().getInstance(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The instance where this resource is located. For example: Cloud * SQL instance ID. * </pre> * * <code>string instance = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for instance to set. * @return This builder for chaining. */ public Builder setInstanceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instance_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object database_ = ""; /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The database. */ public java.lang.String getDatabase() { java.lang.Object ref = database_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); database_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for database. 
*/ public com.google.protobuf.ByteString getDatabaseBytes() { java.lang.Object ref = database_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); database_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The database to set. * @return This builder for chaining. */ public Builder setDatabase(java.lang.String value) { if (value == null) { throw new NullPointerException(); } database_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearDatabase() { database_ = getDefaultInstance().getDatabase(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. Name of a database within the instance. * </pre> * * <code>string database = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for database to set. * @return This builder for chaining. */ public Builder setDatabaseBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); database_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object databaseResource_ = ""; /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. * </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The databaseResource. 
*/ public java.lang.String getDatabaseResource() { java.lang.Object ref = databaseResource_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); databaseResource_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. * </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for databaseResource. */ public com.google.protobuf.ByteString getDatabaseResourceBytes() { java.lang.Object ref = databaseResource_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); databaseResource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. * </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The databaseResource to set. * @return This builder for chaining. */ public Builder setDatabaseResource(java.lang.String value) { if (value == null) { throw new NullPointerException(); } databaseResource_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. * </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearDatabaseResource() { databaseResource_ = getDefaultInstance().getDatabaseResource(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Required. Name of a database resource, for example, a table within the * database. 
* </pre> * * <code>string database_resource = 4 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for databaseResource to set. * @return This builder for chaining. */ public Builder setDatabaseResourceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); databaseResource_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.DatabaseResourceReference) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DatabaseResourceReference) private static final com.google.privacy.dlp.v2.DatabaseResourceReference DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.DatabaseResourceReference(); } public static com.google.privacy.dlp.v2.DatabaseResourceReference getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DatabaseResourceReference> PARSER = new com.google.protobuf.AbstractParser<DatabaseResourceReference>() { @java.lang.Override public DatabaseResourceReference parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); 
} catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DatabaseResourceReference> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DatabaseResourceReference> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.DatabaseResourceReference getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-api-java-client-services
38,022
clients/google-api-services-civicinfo/v2/2.0.0/com/google/api/services/civicinfo/v2/CivicInfo.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.civicinfo.v2;

/**
 * Service definition for CivicInfo (v2).
 *
 * <p>
 * Provides polling places, early vote locations, contest data, election officials, and government
 * representatives for U.S. residential addresses.
 * </p>
 *
 * <p>
 * For more information about this service, see the
 * <a href="https://developers.google.com/civic-information/" target="_blank">API Documentation</a>
 * </p>
 *
 * <p>
 * This service uses {@link CivicInfoRequestInitializer} to initialize global parameters via its
 * {@link Builder}.
 * </p>
 *
 * @since 1.3
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public class CivicInfo extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {

  // Note: Leave this static initializer at the top of the file.
  // Fails fast at class-load time if an incompatible (too old) google-api-client
  // runtime is on the classpath, instead of failing obscurely at request time.
  static {
    com.google.api.client.util.Preconditions.checkState(
        (com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
         com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1))) ||
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION >= 2,
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.31.1 of google-api-client to run version " +
        "2.0.0 of the Google Civic Information API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }

  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://civicinfo.googleapis.com/";

  /**
   * The default encoded mTLS root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.31
   */
  public static final String DEFAULT_MTLS_ROOT_URL = "https://civicinfo.mtls.googleapis.com/";

  /**
   * The default encoded service path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";

  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";

  /**
   * The default encoded base URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;

  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public CivicInfo(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }

  /**
   * @param builder builder
   */
  CivicInfo(Builder builder) {
    super(builder);
  }

  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    super.initialize(httpClientRequest);
  }

  /**
   * An accessor for creating requests from the Divisions collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code CivicInfo civicinfo = new CivicInfo(...);}
   *   {@code CivicInfo.Divisions.List request = civicinfo.divisions().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Divisions divisions() {
    return new Divisions();
  }

  /**
   * The "divisions" collection of methods.
   */
  public class Divisions {

    /**
     * Lookup OCDIDs and names for divisions related to an address.
     *
     * Create a request for the method "divisions.queryDivisionByAddress".
     *
     * This request holds the parameters needed by the civicinfo server. After setting any optional
     * parameters, call the {@link QueryDivisionByAddress#execute()} method to invoke the remote
     * operation.
     *
     * @return the request
     */
    public QueryDivisionByAddress queryDivisionByAddress() throws java.io.IOException {
      QueryDivisionByAddress result = new QueryDivisionByAddress();
      initialize(result);
      return result;
    }

    public class QueryDivisionByAddress extends CivicInfoRequest<com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2DivisionByAddressResponse> {

      private static final String REST_PATH = "civicinfo/v2/divisionsByAddress";

      /**
       * Lookup OCDIDs and names for divisions related to an address.
       *
       * Create a request for the method "divisions.queryDivisionByAddress".
       *
       * This request holds the parameters needed by the civicinfo server. After setting any
       * optional parameters, call the {@link QueryDivisionByAddress#execute()} method to invoke the
       * remote operation. <p> {@link
       * QueryDivisionByAddress#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
       * must be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @since 1.13
       */
      protected QueryDivisionByAddress() {
        super(CivicInfo.this, "GET", REST_PATH, null, com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2DivisionByAddressResponse.class);
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      @Override
      public QueryDivisionByAddress set$Xgafv(java.lang.String $Xgafv) {
        return (QueryDivisionByAddress) super.set$Xgafv($Xgafv);
      }

      @Override
      public QueryDivisionByAddress setAccessToken(java.lang.String accessToken) {
        return (QueryDivisionByAddress) super.setAccessToken(accessToken);
      }

      @Override
      public QueryDivisionByAddress setAlt(java.lang.String alt) {
        return (QueryDivisionByAddress) super.setAlt(alt);
      }

      @Override
      public QueryDivisionByAddress setCallback(java.lang.String callback) {
        return (QueryDivisionByAddress) super.setCallback(callback);
      }

      @Override
      public QueryDivisionByAddress setFields(java.lang.String fields) {
        return (QueryDivisionByAddress) super.setFields(fields);
      }

      @Override
      public QueryDivisionByAddress setKey(java.lang.String key) {
        return (QueryDivisionByAddress) super.setKey(key);
      }

      @Override
      public QueryDivisionByAddress setOauthToken(java.lang.String oauthToken) {
        return (QueryDivisionByAddress) super.setOauthToken(oauthToken);
      }

      @Override
      public QueryDivisionByAddress setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (QueryDivisionByAddress) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public QueryDivisionByAddress setQuotaUser(java.lang.String quotaUser) {
        return (QueryDivisionByAddress) super.setQuotaUser(quotaUser);
      }

      @Override
      public QueryDivisionByAddress setUploadType(java.lang.String uploadType) {
        return (QueryDivisionByAddress) super.setUploadType(uploadType);
      }

      @Override
      public QueryDivisionByAddress setUploadProtocol(java.lang.String uploadProtocol) {
        return (QueryDivisionByAddress) super.setUploadProtocol(uploadProtocol);
      }

      // The address whose related divisions are looked up (see method description above).
      @com.google.api.client.util.Key
      private java.lang.String address;

      /** The address to look up divisions for. */
      public java.lang.String getAddress() {
        return address;
      }

      /** The address to look up divisions for. */
      public QueryDivisionByAddress setAddress(java.lang.String address) {
        this.address = address;
        return this;
      }

      @Override
      public QueryDivisionByAddress set(String parameterName, Object value) {
        return (QueryDivisionByAddress) super.set(parameterName, value);
      }
    }

    /**
     * Searches for political divisions by their natural name or OCD ID.
     *
     * Create a request for the method "divisions.search".
     *
     * This request holds the parameters needed by the civicinfo server. After setting any optional
     * parameters, call the {@link Search#execute()} method to invoke the remote operation.
     *
     * @return the request
     */
    public Search search() throws java.io.IOException {
      Search result = new Search();
      initialize(result);
      return result;
    }

    public class Search extends CivicInfoRequest<com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2DivisionSearchResponse> {

      private static final String REST_PATH = "civicinfo/v2/divisions";

      /**
       * Searches for political divisions by their natural name or OCD ID.
       *
       * Create a request for the method "divisions.search".
       *
       * This request holds the parameters needed by the civicinfo server. After setting any
       * optional parameters, call the {@link Search#execute()} method to invoke the remote operation.
       * <p> {@link
       * Search#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
       * be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @since 1.13
       */
      protected Search() {
        super(CivicInfo.this, "GET", REST_PATH, null, com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2DivisionSearchResponse.class);
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      @Override
      public Search set$Xgafv(java.lang.String $Xgafv) {
        return (Search) super.set$Xgafv($Xgafv);
      }

      @Override
      public Search setAccessToken(java.lang.String accessToken) {
        return (Search) super.setAccessToken(accessToken);
      }

      @Override
      public Search setAlt(java.lang.String alt) {
        return (Search) super.setAlt(alt);
      }

      @Override
      public Search setCallback(java.lang.String callback) {
        return (Search) super.setCallback(callback);
      }

      @Override
      public Search setFields(java.lang.String fields) {
        return (Search) super.setFields(fields);
      }

      @Override
      public Search setKey(java.lang.String key) {
        return (Search) super.setKey(key);
      }

      @Override
      public Search setOauthToken(java.lang.String oauthToken) {
        return (Search) super.setOauthToken(oauthToken);
      }

      @Override
      public Search setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (Search) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public Search setQuotaUser(java.lang.String quotaUser) {
        return (Search) super.setQuotaUser(quotaUser);
      }

      @Override
      public Search setUploadType(java.lang.String uploadType) {
        return (Search) super.setUploadType(uploadType);
      }

      @Override
      public Search setUploadProtocol(java.lang.String uploadProtocol) {
        return (Search) super.setUploadProtocol(uploadProtocol);
      }

      /**
       * The search query. Queries can cover any parts of an OCD ID or a human readable division
       * name. All words given in the query are treated as required patterns. In addition to that,
       * most query operators of the Apache Lucene library are supported. See
       * http://lucene.apache.org/core/2_9_4/queryparsersyntax.html
       */
      @com.google.api.client.util.Key
      private java.lang.String query;

      /** The search query. Queries can cover any parts of an OCD ID or a human readable division name. All
     words given in the query are treated as required patterns. In addition to that, most query
     operators of the Apache Lucene library are supported. See
     http://lucene.apache.org/core/2_9_4/queryparsersyntax.html
       */
      public java.lang.String getQuery() {
        return query;
      }

      /**
       * The search query. Queries can cover any parts of an OCD ID or a human readable division
       * name. All words given in the query are treated as required patterns. In addition to that,
       * most query operators of the Apache Lucene library are supported. See
       * http://lucene.apache.org/core/2_9_4/queryparsersyntax.html
       */
      public Search setQuery(java.lang.String query) {
        this.query = query;
        return this;
      }

      @Override
      public Search set(String parameterName, Object value) {
        return (Search) super.set(parameterName, value);
      }
    }
  }

  /**
   * An accessor for creating requests from the Elections collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code CivicInfo civicinfo = new CivicInfo(...);}
   *   {@code CivicInfo.Elections.List request = civicinfo.elections().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection
   */
  public Elections elections() {
    return new Elections();
  }

  /**
   * The "elections" collection of methods.
   */
  public class Elections {

    /**
     * List of available elections to query.
     *
     * Create a request for the method "elections.electionQuery".
     *
     * This request holds the parameters needed by the civicinfo server. After setting any optional
     * parameters, call the {@link ElectionQuery#execute()} method to invoke the remote operation.
     *
     * @return the request
     */
    public ElectionQuery electionQuery() throws java.io.IOException {
      ElectionQuery result = new ElectionQuery();
      initialize(result);
      return result;
    }

    public class ElectionQuery extends CivicInfoRequest<com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2ElectionsQueryResponse> {

      private static final String REST_PATH = "civicinfo/v2/elections";

      /**
       * List of available elections to query.
       *
       * Create a request for the method "elections.electionQuery".
       *
       * This request holds the parameters needed by the civicinfo server. After setting any
       * optional parameters, call the {@link ElectionQuery#execute()} method to invoke the remote
       * operation. <p> {@link
       * ElectionQuery#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
       * must be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @since 1.13
       */
      protected ElectionQuery() {
        super(CivicInfo.this, "GET", REST_PATH, null, com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2ElectionsQueryResponse.class);
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      @Override
      public ElectionQuery set$Xgafv(java.lang.String $Xgafv) {
        return (ElectionQuery) super.set$Xgafv($Xgafv);
      }

      @Override
      public ElectionQuery setAccessToken(java.lang.String accessToken) {
        return (ElectionQuery) super.setAccessToken(accessToken);
      }

      @Override
      public ElectionQuery setAlt(java.lang.String alt) {
        return (ElectionQuery) super.setAlt(alt);
      }

      @Override
      public ElectionQuery setCallback(java.lang.String callback) {
        return (ElectionQuery) super.setCallback(callback);
      }

      @Override
      public ElectionQuery setFields(java.lang.String fields) {
        return (ElectionQuery) super.setFields(fields);
      }

      @Override
      public ElectionQuery setKey(java.lang.String key) {
        return (ElectionQuery) super.setKey(key);
      }

      @Override
      public ElectionQuery setOauthToken(java.lang.String oauthToken) {
        return (ElectionQuery) super.setOauthToken(oauthToken);
      }

      @Override
      public ElectionQuery setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (ElectionQuery) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public ElectionQuery setQuotaUser(java.lang.String quotaUser) {
        return (ElectionQuery) super.setQuotaUser(quotaUser);
      }

      @Override
      public ElectionQuery setUploadType(java.lang.String uploadType) {
        return (ElectionQuery) super.setUploadType(uploadType);
      }

      @Override
      public ElectionQuery setUploadProtocol(java.lang.String uploadProtocol) {
        return (ElectionQuery) super.setUploadProtocol(uploadProtocol);
      }

      /** Whether to include data that has not been allowlisted yet */
      @com.google.api.client.util.Key
      private java.lang.Boolean productionDataOnly;

      /** Whether to include data that has not been allowlisted yet [default: true] */
      public java.lang.Boolean getProductionDataOnly() {
        return productionDataOnly;
      }

      /** Whether to include data that has not been allowlisted yet */
      public ElectionQuery setProductionDataOnly(java.lang.Boolean productionDataOnly) {
        this.productionDataOnly = productionDataOnly;
        return this;
      }

      /**
       * Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
       *
       * <p>
       * Boolean properties can have four possible values:
       * {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
       * or {@link Boolean#FALSE}.
       * </p>
       *
       * <p>
       * This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
       * and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
       * it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * </p>
       *
       * <p>
       * Whether to include data that has not been allowlisted yet
       * </p>
       */
      public boolean isProductionDataOnly() {
        if (productionDataOnly == null || productionDataOnly == com.google.api.client.util.Data.NULL_BOOLEAN) {
          return true;
        }
        return productionDataOnly;
      }

      @Override
      public ElectionQuery set(String parameterName, Object value) {
        return (ElectionQuery) super.set(parameterName, value);
      }
    }

    /**
     * Looks up information relevant to a voter based on the voter's registered address.
     *
     * Create a request for the method "elections.voterInfoQuery".
     *
     * This request holds the parameters needed by the civicinfo server. After setting any optional
     * parameters, call the {@link VoterInfoQuery#execute()} method to invoke the remote operation.
     *
     * @return the request
     */
    public VoterInfoQuery voterInfoQuery() throws java.io.IOException {
      VoterInfoQuery result = new VoterInfoQuery();
      initialize(result);
      return result;
    }

    public class VoterInfoQuery extends CivicInfoRequest<com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2VoterInfoResponse> {

      private static final String REST_PATH = "civicinfo/v2/voterinfo";

      /**
       * Looks up information relevant to a voter based on the voter's registered address.
       *
       * Create a request for the method "elections.voterInfoQuery".
       *
       * This request holds the parameters needed by the civicinfo server. After setting any
       * optional parameters, call the {@link VoterInfoQuery#execute()} method to invoke the remote
       * operation. <p> {@link
       * VoterInfoQuery#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
       * must be called to initialize this instance immediately after invoking the constructor. </p>
       *
       * @since 1.13
       */
      protected VoterInfoQuery() {
        super(CivicInfo.this, "GET", REST_PATH, null, com.google.api.services.civicinfo.v2.model.CivicinfoApiprotosV2VoterInfoResponse.class);
      }

      @Override
      public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
        return super.executeUsingHead();
      }

      @Override
      public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
        return super.buildHttpRequestUsingHead();
      }

      @Override
      public VoterInfoQuery set$Xgafv(java.lang.String $Xgafv) {
        return (VoterInfoQuery) super.set$Xgafv($Xgafv);
      }

      @Override
      public VoterInfoQuery setAccessToken(java.lang.String accessToken) {
        return (VoterInfoQuery) super.setAccessToken(accessToken);
      }

      @Override
      public VoterInfoQuery setAlt(java.lang.String alt) {
        return (VoterInfoQuery) super.setAlt(alt);
      }

      @Override
      public VoterInfoQuery setCallback(java.lang.String callback) {
        return (VoterInfoQuery) super.setCallback(callback);
      }

      @Override
      public VoterInfoQuery setFields(java.lang.String fields) {
        return (VoterInfoQuery) super.setFields(fields);
      }

      @Override
      public VoterInfoQuery setKey(java.lang.String key) {
        return (VoterInfoQuery) super.setKey(key);
      }

      @Override
      public VoterInfoQuery setOauthToken(java.lang.String oauthToken) {
        return (VoterInfoQuery) super.setOauthToken(oauthToken);
      }

      @Override
      public VoterInfoQuery setPrettyPrint(java.lang.Boolean prettyPrint) {
        return (VoterInfoQuery) super.setPrettyPrint(prettyPrint);
      }

      @Override
      public VoterInfoQuery setQuotaUser(java.lang.String quotaUser) {
        return (VoterInfoQuery) super.setQuotaUser(quotaUser);
      }

      @Override
      public VoterInfoQuery setUploadType(java.lang.String uploadType) {
        return (VoterInfoQuery) super.setUploadType(uploadType);
      }

      @Override
      public VoterInfoQuery setUploadProtocol(java.lang.String uploadProtocol) {
        return (VoterInfoQuery) super.setUploadProtocol(uploadProtocol);
      }

      /** The registered address of the voter to look up. */
      @com.google.api.client.util.Key
      private java.lang.String address;

      /** The registered address of the voter to look up. */
      public java.lang.String getAddress() {
        return address;
      }

      /** The registered address of the voter to look up. */
      public VoterInfoQuery setAddress(java.lang.String address) {
        this.address = address;
        return this;
      }

      /**
       * The unique ID of the election to look up. A list of election IDs can be obtained at
       * https://www.googleapis.com/civicinfo/{version}/elections. If no election ID is specified in
       * the query and there is more than one election with data for the given voter, the additional
       * elections are provided in the otherElections response field.
       */
      @com.google.api.client.util.Key
      private java.lang.Long electionId;

      /** The unique ID of the election to look up. A list of election IDs can be obtained at
     https://www.googleapis.com/civicinfo/{version}/elections. If no election ID is specified in the
     query and there is more than one election with data for the given voter, the additional elections
     are provided in the otherElections response field. [default: 0]
       */
      public java.lang.Long getElectionId() {
        return electionId;
      }

      /**
       * The unique ID of the election to look up. A list of election IDs can be obtained at
       * https://www.googleapis.com/civicinfo/{version}/elections. If no election ID is specified in
       * the query and there is more than one election with data for the given voter, the additional
       * elections are provided in the otherElections response field.
       */
      public VoterInfoQuery setElectionId(java.lang.Long electionId) {
        this.electionId = electionId;
        return this;
      }

      /** If set to true, only data from official state sources will be returned. */
      @com.google.api.client.util.Key
      private java.lang.Boolean officialOnly;

      /** If set to true, only data from official state sources will be returned. [default: false] */
      public java.lang.Boolean getOfficialOnly() {
        return officialOnly;
      }

      /** If set to true, only data from official state sources will be returned. */
      public VoterInfoQuery setOfficialOnly(java.lang.Boolean officialOnly) {
        this.officialOnly = officialOnly;
        return this;
      }

      /**
       * Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
       *
       * <p>
       * Boolean properties can have four possible values:
       * {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
       * or {@link Boolean#FALSE}.
       * </p>
       *
       * <p>
       * This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
       * and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
       * it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * </p>
       *
       * <p>
       * If set to true, only data from official state sources will be returned.
       * </p>
       */
      public boolean isOfficialOnly() {
        if (officialOnly == null || officialOnly == com.google.api.client.util.Data.NULL_BOOLEAN) {
          return false;
        }
        return officialOnly;
      }

      /**
       * Whether to include data that has not been vetted yet. Should only be made available to
       * internal IPs or trusted partners. This is a non-discoverable parameter in the One Platform
       * API config.
       */
      @com.google.api.client.util.Key
      private java.lang.Boolean productionDataOnly;

      /** Whether to include data that has not been vetted yet. Should only be made available to internal
     IPs or trusted partners. This is a non-discoverable parameter in the One Platform API config.
     [default: true]
       */
      public java.lang.Boolean getProductionDataOnly() {
        return productionDataOnly;
      }

      /**
       * Whether to include data that has not been vetted yet. Should only be made available to
       * internal IPs or trusted partners. This is a non-discoverable parameter in the One Platform
       * API config.
       */
      public VoterInfoQuery setProductionDataOnly(java.lang.Boolean productionDataOnly) {
        this.productionDataOnly = productionDataOnly;
        return this;
      }

      /**
       * Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
       *
       * <p>
       * Boolean properties can have four possible values:
       * {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
       * or {@link Boolean#FALSE}.
       * </p>
       *
       * <p>
       * This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
       * and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
       * it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * </p>
       *
       * <p>
       * Whether to include data that has not been vetted yet. Should only be made available to
       * internal IPs or trusted partners. This is a non-discoverable parameter in the One Platform
       * API config.
       * </p>
       */
      public boolean isProductionDataOnly() {
        if (productionDataOnly == null || productionDataOnly == com.google.api.client.util.Data.NULL_BOOLEAN) {
          return true;
        }
        return productionDataOnly;
      }

      /**
       * If set to true, the query will return the success code and include any partial information
       * when it is unable to determine a matching address or unable to determine the election for
       * electionId=0 queries.
       */
      @com.google.api.client.util.Key
      private java.lang.Boolean returnAllAvailableData;

      /** If set to true, the query will return the success code and include any partial information when it
     is unable to determine a matching address or unable to determine the election for electionId=0
     queries. [default: false]
       */
      public java.lang.Boolean getReturnAllAvailableData() {
        return returnAllAvailableData;
      }

      /**
       * If set to true, the query will return the success code and include any partial information
       * when it is unable to determine a matching address or unable to determine the election for
       * electionId=0 queries.
       */
      public VoterInfoQuery setReturnAllAvailableData(java.lang.Boolean returnAllAvailableData) {
        this.returnAllAvailableData = returnAllAvailableData;
        return this;
      }

      /**
       * Convenience method that returns only {@link Boolean#TRUE} or {@link Boolean#FALSE}.
       *
       * <p>
       * Boolean properties can have four possible values:
       * {@code null}, {@link com.google.api.client.util.Data#NULL_BOOLEAN}, {@link Boolean#TRUE}
       * or {@link Boolean#FALSE}.
       * </p>
       *
       * <p>
       * This method returns {@link Boolean#TRUE} if the default of the property is {@link Boolean#TRUE}
       * and it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * {@link Boolean#FALSE} is returned if the default of the property is {@link Boolean#FALSE} and
       * it is {@code null} or {@link com.google.api.client.util.Data#NULL_BOOLEAN}.
       * </p>
       *
       * <p>
       * If set to true, the query will return the success code and include any partial information
       * when it is unable to determine a matching address or unable to determine the election for
       * electionId=0 queries.
       * </p>
       */
      public boolean isReturnAllAvailableData() {
        if (returnAllAvailableData == null || returnAllAvailableData == com.google.api.client.util.Data.NULL_BOOLEAN) {
          return false;
        }
        return returnAllAvailableData;
      }

      @Override
      public VoterInfoQuery set(String parameterName, Object value) {
        return (VoterInfoQuery) super.set(parameterName, value);
      }
    }
  }

  /**
   * Builder for {@link CivicInfo}.
   *
   * <p>
   * Implementation is not thread-safe.
   * </p>
   *
   * @since 1.3.0
   */
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {

    private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
      // If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
      // If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
      // Use the regular endpoint for all other cases.
      String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
      useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
      if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
        return DEFAULT_MTLS_ROOT_URL;
      }
      return DEFAULT_ROOT_URL;
    }

    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     *        <ul>
     *        <li>Google App Engine:
     *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     *        <li>Android: {@code newCompatibleTransport} from
     *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     *        </li>
     *        </ul>
     * @param jsonFactory JSON factory, which may be:
     *        <ul>
     *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     *        <li>Android Honeycomb or higher:
     *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     *        </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      super(
          transport,
          jsonFactory,
          Builder.chooseEndpoint(transport),
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false);
      setBatchPath(DEFAULT_BATCH_PATH);
    }

    /** Builds a new instance of {@link CivicInfo}. */
    @Override
    public CivicInfo build() {
      return new CivicInfo(this);
    }

    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }

    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }

    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }

    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }

    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }

    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }

    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }

    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }

    /**
     * Set the {@link CivicInfoRequestInitializer}.
     *
     * @since 1.12
     */
    public Builder setCivicInfoRequestInitializer(
        CivicInfoRequestInitializer civicinfoRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(civicinfoRequestInitializer);
    }

    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }

    @Override
    public Builder setUniverseDomain(String universeDomain) {
      return (Builder) super.setUniverseDomain(universeDomain);
    }
  }
}
apache/hive
38,111
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.txn; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.classification.RetrySemantics; import org.apache.hadoop.hive.metastore.api.AbortCompactResponse; import org.apache.hadoop.hive.metastore.api.AbortCompactionRequest; import org.apache.hadoop.hive.metastore.api.AbortTxnRequest; import org.apache.hadoop.hive.metastore.api.AbortTxnsRequest; import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions; import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsRequest; import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsResponse; import org.apache.hadoop.hive.metastore.api.CheckLockRequest; import org.apache.hadoop.hive.metastore.api.CommitTxnRequest; import org.apache.hadoop.hive.metastore.api.CompactionRequest; import org.apache.hadoop.hive.metastore.api.CompactionResponse; import org.apache.hadoop.hive.metastore.api.CreationMetadata; import 
org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FindNextCompactRequest; import org.apache.hadoop.hive.metastore.api.GetLatestCommittedCompactionInfoRequest; import org.apache.hadoop.hive.metastore.api.GetLatestCommittedCompactionInfoResponse; import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse; import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse; import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsRequest; import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsResponse; import org.apache.hadoop.hive.metastore.api.HeartbeatRequest; import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeRequest; import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse; import org.apache.hadoop.hive.metastore.api.HiveObjectType; import org.apache.hadoop.hive.metastore.api.LockRequest; import org.apache.hadoop.hive.metastore.api.LockResponse; import org.apache.hadoop.hive.metastore.api.Materialization; import org.apache.hadoop.hive.metastore.api.MaxAllocatedTableWriteIdRequest; import org.apache.hadoop.hive.metastore.api.MaxAllocatedTableWriteIdResponse; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchCompactionException; import org.apache.hadoop.hive.metastore.api.NoSuchLockException; import org.apache.hadoop.hive.metastore.api.NoSuchTxnException; import org.apache.hadoop.hive.metastore.api.OpenTxnRequest; import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.ReplTblWriteIdStateRequest; import org.apache.hadoop.hive.metastore.api.ReplayedTxnsForPolicyResult; import org.apache.hadoop.hive.metastore.api.SeedTableWriteIdsRequest; import org.apache.hadoop.hive.metastore.api.SeedTxnIdRequest; import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; 
import org.apache.hadoop.hive.metastore.api.ShowLocksRequest; import org.apache.hadoop.hive.metastore.api.ShowLocksResponse; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.TxnAbortedException; import org.apache.hadoop.hive.metastore.api.TxnOpenException; import org.apache.hadoop.hive.metastore.api.TxnType; import org.apache.hadoop.hive.metastore.api.UnlockRequest; import org.apache.hadoop.hive.metastore.api.UpdateTransactionalStatsRequest; import org.apache.hadoop.hive.metastore.events.ListenerEvent; import org.apache.hadoop.hive.metastore.txn.entities.CompactionInfo; import org.apache.hadoop.hive.metastore.txn.entities.CompactionMetricsData; import org.apache.hadoop.hive.metastore.txn.entities.MetricsInfo; import org.apache.hadoop.hive.metastore.txn.jdbc.MultiDataSourceJdbcResource; import org.apache.hadoop.hive.metastore.txn.retry.SqlRetry; import org.apache.hadoop.hive.metastore.txn.retry.SqlRetryException; import org.apache.hadoop.hive.metastore.txn.retry.SqlRetryHandler; import org.springframework.transaction.annotation.Transactional; import java.sql.SQLException; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; /** * A handler to answer transaction related calls that come into the metastore * server. */ @InterfaceAudience.Private @InterfaceStability.Evolving public interface TxnStore extends Configurable { /** * Prefix for key when committing with a key/value. 
*/ String TXN_KEY_START = "_meta"; enum MUTEX_KEY { Initiator, Cleaner, HouseKeeper, IcebergHouseKeeper, TxnCleaner, CompactionScheduler, MaterializationRebuild } // Compactor states (Should really be enum) String INITIATED_RESPONSE = "initiated"; String WORKING_RESPONSE = "working"; String CLEANING_RESPONSE = "ready for cleaning"; String FAILED_RESPONSE = "failed"; String SUCCEEDED_RESPONSE = "succeeded"; String DID_NOT_INITIATE_RESPONSE = "did not initiate"; String REFUSED_RESPONSE = "refused"; String ABORTED_RESPONSE = "aborted"; char INITIATED_STATE = 'i'; char WORKING_STATE = 'w'; char READY_FOR_CLEANING = 'r'; char FAILED_STATE = 'f'; char SUCCEEDED_STATE = 's'; char DID_NOT_INITIATE = 'a'; char REFUSED_STATE = 'c'; char ABORTED_STATE = 'x'; // Compactor types char MAJOR_TYPE = 'a'; char MINOR_TYPE = 'i'; char REBALANCE_TYPE = 'r'; char ABORT_TXN_CLEANUP_TYPE = 'c'; char SMART_OPTIMIZE_TYPE = '*'; String[] COMPACTION_STATES = new String[] {INITIATED_RESPONSE, WORKING_RESPONSE, CLEANING_RESPONSE, FAILED_RESPONSE, SUCCEEDED_RESPONSE, DID_NOT_INITIATE_RESPONSE, REFUSED_RESPONSE }; int TIMED_OUT_TXN_ABORT_BATCH_SIZE = 50000; String POOL_TX = "txnhandler"; String POOL_MUTEX = "mutex"; String POOL_COMPACTOR = "compactor"; /** * @return Returns the {@link SqlRetryHandler} instance used by {@link TxnStore}. */ SqlRetryHandler getRetryHandler(); /** * @return Returns the {@link MultiDataSourceJdbcResource} instance used by {@link TxnStore}. */ MultiDataSourceJdbcResource getJdbcResourceHolder(); /** * Get information about open transactions. This gives extensive information about the * transactions rather than just the list of transactions. This should be used when the need * is to see information about the transactions (e.g. show transactions). 
* @return information about open transactions * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly GetOpenTxnsInfoResponse getOpenTxnsInfo() throws MetaException; /** * Get list of valid transactions. This gives just the list of transactions that are open. * @return list of open transactions, as well as a high watermark. * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly GetOpenTxnsResponse getOpenTxns() throws MetaException; /** * Get list of valid transactions. This gives just the list of transactions that are open. * @param excludeTxnTypes : excludes this type of txns while getting the open txns * @return list of open transactions, as well as a high watermark. * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly GetOpenTxnsResponse getOpenTxns(List<TxnType> excludeTxnTypes) throws MetaException; @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly List<Long> getOpenTxnForPolicy(List<Long> openTxnList, String replPolicy); @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly ReplayedTxnsForPolicyResult getReplayedTxnsForPolicy(String replPolicy) throws MetaException; /** * Get the count for open transactions. * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly void countOpenTxns() throws MetaException; /** * Open a set of transactions * @param rqst request to open transactions * @return information on opened transactions * @throws MetaException */ @SqlRetry @Transactional(value = POOL_TX, noRollbackFor = SqlRetryException.class) @RetrySemantics.Idempotent OpenTxnsResponse openTxns(OpenTxnRequest rqst) throws MetaException; @SqlRetry(lockInternally = true) @Transactional(POOL_TX) @RetrySemantics.Idempotent long getTargetTxnId(String replPolicy, long sourceTxnId) throws MetaException; /** * Abort (rollback) a transaction. 
* @param rqst info on transaction to abort * @throws NoSuchTxnException * @throws MetaException */ @SqlRetry(lockInternally = true) @Transactional(POOL_TX) @RetrySemantics.Idempotent void abortTxn(AbortTxnRequest rqst) throws NoSuchTxnException, MetaException, TxnAbortedException; /** * Abort (rollback) a list of transactions in one request. * @param rqst info on transactions to abort * @throws NoSuchTxnException * @throws MetaException */ @SqlRetry(lockInternally = true) @Transactional(POOL_TX) @RetrySemantics.Idempotent void abortTxns(AbortTxnsRequest rqst) throws NoSuchTxnException, MetaException; /** * Commit a transaction * @param rqst info on transaction to commit * @throws NoSuchTxnException * @throws TxnAbortedException * @throws MetaException */ @SqlRetry(lockInternally = true) @Transactional(value = POOL_TX, noRollbackFor = TxnAbortedException.class) @RetrySemantics.Idempotent("No-op if already committed") void commitTxn(CommitTxnRequest rqst) throws NoSuchTxnException, TxnAbortedException, MetaException; /** * Replicate Table Write Ids state to mark aborted write ids and writeid high water mark. * @param rqst info on table/partitions and writeid snapshot to replicate. * @throws MetaException in case of failure */ @SqlRetry(lockInternally = true) @Transactional(POOL_TX) @RetrySemantics.Idempotent("No-op if already replicated the writeid state") void replTableWriteIdState(ReplTblWriteIdStateRequest rqst) throws MetaException; @Transactional(POOL_TX) void updateTransactionStatistics(UpdateTransactionalStatsRequest req) throws MetaException; /** * Get invalidation info for the materialization. Currently, the materialization information * only contains information about whether there was update/delete operations on the source * tables used by the materialization since it was created. 
* @param cm creation metadata for the materialization * @param validTxnList valid transaction list for snapshot taken for current query * @throws MetaException */ @Transactional(POOL_TX) @RetrySemantics.Idempotent Materialization getMaterializationInvalidationInfo( final CreationMetadata cm, final String validTxnList) throws MetaException; @RetrySemantics.ReadOnly long getTxnIdForWriteId(String dbName, String tblName, long writeId) throws MetaException; @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly long getLatestTxnIdInConflict(long txnid) throws MetaException; @SqlRetry(lockInternally = true) @Transactional(POOL_TX) LockResponse lockMaterializationRebuild(String dbName, String tableName, long txnId) throws MetaException; @SqlRetry(lockInternally = true) @Transactional(POOL_TX) boolean heartbeatLockMaterializationRebuild(String dbName, String tableName, long txnId) throws MetaException; @SqlRetry(lockInternally = true) @Transactional(POOL_TX) long cleanupMaterializationRebuildLocks(ValidTxnList validTxnList, long timeout) throws MetaException; /** * Gets the list of valid write ids for the given table wrt to current txn * @param rqst info on transaction and list of table names associated with given transaction * @throws NoSuchTxnException * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly GetValidWriteIdsResponse getValidWriteIds(GetValidWriteIdsRequest rqst) throws NoSuchTxnException, MetaException; @SqlRetry @Transactional(POOL_TX) @RetrySemantics.SafeToRetry void addWriteIdsToMinHistory(long txnId, Map<String, Long> minOpenWriteIds) throws MetaException; /** * Allocate a write ID for the given table and associate it with a transaction * @param rqst info on transaction and table to allocate write id * @throws NoSuchTxnException * @throws TxnAbortedException * @throws MetaException */ @SqlRetry(lockInternally = true, retryOnDuplicateKey = true) @Transactional(POOL_TX) AllocateTableWriteIdsResponse 
allocateTableWriteIds(AllocateTableWriteIdsRequest rqst) throws NoSuchTxnException, TxnAbortedException, MetaException; /** * Reads the maximum allocated writeId for the given table * @param rqst table for which the maximum writeId is requested * @return the maximum allocated writeId */ @SqlRetry @Transactional(POOL_TX) MaxAllocatedTableWriteIdResponse getMaxAllocatedTableWrited(MaxAllocatedTableWriteIdRequest rqst) throws MetaException; /** * Called on conversion of existing table to full acid. Sets initial write ID to a high * enough value so that we can assign unique ROW__IDs to data in existing files. */ @SqlRetry @Transactional(POOL_TX) void seedWriteId(SeedTableWriteIdsRequest rqst) throws MetaException; /** * Sets the next txnId to the given value. * If the actual txnId is greater it will throw an exception. * @param rqst */ @SqlRetry(lockInternally = true) @Transactional(POOL_TX) void seedTxnId(SeedTxnIdRequest rqst) throws MetaException; /** * Obtain a lock. * @param rqst information on the lock to obtain. If the requester is part of a transaction * the txn information must be included in the lock request. * @return info on the lock, including whether it was obtained. * @throws NoSuchTxnException * @throws TxnAbortedException * @throws MetaException */ @RetrySemantics.CannotRetry LockResponse lock(LockRequest rqst) throws NoSuchTxnException, TxnAbortedException, MetaException; /** * Check whether a lock has been obtained. This is used after {@link #lock} returned a wait * state. * @param rqst info on the lock to check * @return info on the state of the lock * @throws NoSuchTxnException * @throws NoSuchLockException * @throws TxnAbortedException * @throws MetaException */ @Transactional(POOL_TX) @RetrySemantics.SafeToRetry LockResponse checkLock(CheckLockRequest rqst) throws NoSuchTxnException, NoSuchLockException, TxnAbortedException, MetaException; /** * Unlock a lock. It is not legal to call this if the caller is part of a txn. 
In that case * the txn should be committed or aborted instead. (Note someday this will change since * multi-statement transactions will allow unlocking in the transaction.) * @param rqst lock to unlock * @throws NoSuchLockException * @throws TxnOpenException * @throws MetaException */ @Transactional(POOL_TX) @RetrySemantics.Idempotent void unlock(UnlockRequest rqst) throws NoSuchLockException, TxnOpenException, MetaException; /** * Get information on current locks. * @param rqst lock information to retrieve * @return lock information. * @throws MetaException */ @Transactional(POOL_TX) @RetrySemantics.ReadOnly ShowLocksResponse showLocks(ShowLocksRequest rqst) throws MetaException; /** * Send a heartbeat for a lock or a transaction * @param ids lock and/or txn id to heartbeat * @throws NoSuchTxnException * @throws NoSuchLockException * @throws TxnAbortedException * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.SafeToRetry void heartbeat(HeartbeatRequest ids) throws NoSuchTxnException, NoSuchLockException, TxnAbortedException, MetaException; /** * Heartbeat a group of transactions together * @param rqst set of transactions to heartbat * @return info on txns that were heartbeated * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.SafeToRetry HeartbeatTxnRangeResponse heartbeatTxnRange(HeartbeatTxnRangeRequest rqst) throws MetaException; /** * Submit a compaction request into the queue. This is called when a user manually requests a * compaction. 
* @param rqst information on what to compact * @return id of the compaction that has been started or existing id if this resource is already scheduled * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.Idempotent CompactionResponse compact(CompactionRequest rqst) throws MetaException; @SqlRetry @Transactional(POOL_TX) @RetrySemantics.SafeToRetry boolean submitForCleanup(CompactionRequest rqst, long highestWriteId, long txnId) throws MetaException; /** * Show list of current compactions. * @param rqst info on which compactions to show * @return compaction information * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly ShowCompactResponse showCompact(ShowCompactRequest rqst) throws MetaException; /** * Abort (rollback) a compaction. * * @param rqst info on compaction to abort * @return * @throws NoSuchCompactionException * @throws MetaException */ @Transactional(POOL_TX) @RetrySemantics.Idempotent AbortCompactResponse abortCompactions(AbortCompactionRequest rqst) throws NoSuchCompactionException, MetaException; /** * Get one latest record of SUCCEEDED or READY_FOR_CLEANING compaction for a table/partition. * No checking is done on the dbname, tablename, or partitionname to make sure they refer to valid objects. * Is is assumed to be done by the caller. * Note that partition names should be supplied with the request for a partitioned table; otherwise, * no records will be returned. * @param rqst info on which compaction to retrieve * @return one latest compaction record for a non partitioned table or one latest record for each * partition specified by the request. * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly GetLatestCommittedCompactionInfoResponse getLatestCommittedCompactionInfo( GetLatestCommittedCompactionInfoRequest rqst) throws MetaException; /** * Add information on a set of dynamic partitions that participated in a transaction. 
* @param rqst dynamic partition info. * @throws NoSuchTxnException * @throws TxnAbortedException * @throws MetaException */ @SqlRetry(lockInternally = true) @Transactional(POOL_TX) @RetrySemantics.SafeToRetry void addDynamicPartitions(AddDynamicPartitions rqst) throws NoSuchTxnException, TxnAbortedException, MetaException; /** * Clean up corresponding records in metastore tables. * @param type Hive object type * @param db database object * @param table table object * @param partitionIterator partition iterator * @throws MetaException */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.Idempotent default void cleanupRecords(HiveObjectType type, Database db, Table table, Iterator<Partition> partitionIterator) throws MetaException { cleanupRecords(type, db, table, partitionIterator, false); } @SqlRetry @Transactional(POOL_TX) @RetrySemantics.Idempotent void cleanupRecords(HiveObjectType type, Database db, Table table, Iterator<Partition> partitionIterator, boolean keepTxnToWriteIdMetaData) throws MetaException; @SqlRetry @Transactional(POOL_TX) @RetrySemantics.Idempotent void cleanupRecords(HiveObjectType type, Database db, Table table, Iterator<Partition> partitionIterator, long txnId) throws MetaException; @SqlRetry @Transactional(POOL_TX) @RetrySemantics.Idempotent void onRename(String oldCatName, String oldDbName, String oldTabName, String oldPartName, String newCatName, String newDbName, String newTabName, String newPartName) throws MetaException; /** * Timeout transactions and/or locks. This should only be called by the compactor. */ @Transactional(POOL_TX) @RetrySemantics.Idempotent void performTimeOuts(); /** * This will look through the completed_txn_components table and look for partitions or tables * that may be ready for compaction. Also, look through txns and txn_components tables for * aborted transactions that we should add to the list. * @param abortedThreshold number of aborted queries forming a potential compaction request. 
* @param abortedTimeThreshold age of an aborted txn in milliseconds that will trigger a * potential compaction request. * @return list of CompactionInfo structs. These will not have id, type, * or runAs set since these are only potential compactions not actual ones. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly Set<CompactionInfo> findPotentialCompactions(int abortedThreshold, long abortedTimeThreshold) throws MetaException; @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly Set<CompactionInfo> findPotentialCompactions(int abortedThreshold, long abortedTimeThreshold, long lastChecked) throws MetaException; /** * This updates COMPACTION_QUEUE. Set runAs username for the case where the request was * generated by the user and so the worker must set this value later. Sets highestWriteId so that * cleaner doesn't clean above what compactor has processed. Updates TXN_COMPONENTS so that * we know where {@code compactionTxnId} was writing to in case it aborts. * @param compactionTxnId - txnid in which Compactor is running */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent void updateCompactorState(CompactionInfo ci, long compactionTxnId) throws MetaException; /** * This will grab the next compaction request off of * the queue, and assign it to the worker. * @deprecated Replaced by * {@link TxnStore#findNextToCompact(org.apache.hadoop.hive.metastore.api.FindNextCompactRequest)} * @param workerId id of the worker calling this, will be recorded in the db * @return an info element for this compaction request, or null if there is no work to do now. */ @Deprecated @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly CompactionInfo findNextToCompact(String workerId) throws MetaException; /** * This will grab the next compaction request off of the queue, and assign it to the worker. 
* @param rqst request to find next compaction to run * @return an info element for next compaction in the queue, or null if there is no work to do now. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly CompactionInfo findNextToCompact(FindNextCompactRequest rqst) throws MetaException; /** * This will mark an entry in the queue as compacted * and put it in the ready to clean state. * @param info info on the compaction entry to mark as compacted. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.SafeToRetry void markCompacted(CompactionInfo info) throws MetaException; /** * Find entries in the queue that are ready to * be cleaned. * @param minOpenTxnWaterMark Minimum open txnId * @param retentionTime Milliseconds to delay the cleaner * @return information on the entry in the queue. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly List<CompactionInfo> findReadyToClean(long minOpenTxnWaterMark, long retentionTime) throws MetaException; /** * Find the aborted entries in TXN_COMPONENTS which can be used to * clean directories belonging to transactions in aborted state. * @param abortedTimeThreshold Age of table/partition's oldest aborted transaction involving a given table * or partition that will trigger cleanup. * @param abortedThreshold Number of aborted transactions involving a given table or partition * that will trigger cleanup. * @return Information of potential abort items that needs to be cleaned. 
* @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly List<CompactionInfo> findReadyToCleanAborts(long abortedTimeThreshold, int abortedThreshold) throws MetaException; /** * Sets the cleaning start time for a particular compaction * * @param info info on the compaction entry */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.CannotRetry void markCleanerStart(CompactionInfo info) throws MetaException; /** * Removes the cleaning start time for a particular compaction * * @param info info on the compaction entry */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.CannotRetry void clearCleanerStart(CompactionInfo info) throws MetaException; /** * This will remove an entry from the queue after * it has been compacted. * * @param info info on the compaction entry to remove */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.CannotRetry void markCleaned(CompactionInfo info) throws MetaException; /** * Mark a compaction entry as failed. This will move it to the compaction history queue with a * failed status. It will NOT clean up aborted transactions in the table/partition associated * with this compaction. * @param info information on the compaction that failed. * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.CannotRetry void markFailed(CompactionInfo info) throws MetaException; /** * Mark a compaction as refused (to run). This can happen if a manual compaction is requested by the user, * but for some reason, the table is not suitable for compation. * @param info compaction job. * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) void markRefused(CompactionInfo info) throws MetaException; /** * Stores the value of {@link CompactionInfo#retryRetention} and {@link CompactionInfo#errorMessage} fields * of the CompactionInfo either by inserting or updating the fields in the HMS database. * @param info The {@link CompactionInfo} object holding the values. 
* @throws MetaException */ @SqlRetry(lockInternally = true) @Transactional(POOL_COMPACTOR) @RetrySemantics.CannotRetry void setCleanerRetryRetentionTimeOnError(CompactionInfo info) throws MetaException; /** * Clean up entries from TXN_TO_WRITE_ID table less than min_uncommited_txnid as found by * min(max(TXNS.txn_id), min(WRITE_SET.WS_COMMIT_ID), min(Aborted TXNS.txn_id)). */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.SafeToRetry void cleanTxnToWriteIdTable() throws MetaException; /** * De-duplicate entries from COMPLETED_TXN_COMPONENTS table. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.SafeToRetry void removeDuplicateCompletedTxnComponents() throws MetaException; /** * Clean up aborted or committed transactions from txns that have no components in txn_components. The reason such * txns exist can be that no work was done in this txn (e.g. Streaming opened TransactionBatch and * abandoned it w/o doing any work) or due to {@link #markCleaned(CompactionInfo)} being called, * or the delete from the txns was delayed because of TXN_OPENTXN_TIMEOUT window. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.SafeToRetry void cleanEmptyAbortedAndCommittedTxns() throws MetaException; /** * This will take all entries assigned to workers * on a host return them to INITIATED state. The initiator should use this at start up to * clean entries from any workers that were in the middle of compacting when the metastore * shutdown. It does not reset entries from worker threads on other hosts as those may still * be working. * @param hostname Name of this host. It is assumed this prefixes the thread's worker id, * so that like hostname% will match the worker id. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent void revokeFromLocalWorkers(String hostname) throws MetaException; /** * This call will return all compaction queue * entries assigned to a worker but over the timeout back to the initiated state. 
* This should be called by the initiator on start up and occasionally when running to clean up * after dead threads. At start up {@link #revokeFromLocalWorkers(String)} should be called * first. * @param timeout number of milliseconds since start time that should elapse before a worker is * declared dead. */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent void revokeTimedoutWorkers(long timeout) throws MetaException; /** * Queries metastore DB directly to find columns in the table which have statistics information. * If {@code ci} includes partition info then per partition stats info is examined, otherwise * table level stats are examined. * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly List<String> findColumnsWithStats(CompactionInfo ci) throws MetaException; /** * For any given compactable entity (partition, table if not partitioned) the history of compactions * may look like "sssfffaaasffss", for example. The idea is to retain the tail (most recent) of the * history such that a configurable number of each type of state is present. Any other entries * can be purged. This scheme has advantage of always retaining the last failure/success even if * it's not recent. * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.SafeToRetry void purgeCompactionHistory() throws MetaException; /** * WriteSet tracking is used to ensure proper transaction isolation. This method deletes the * transaction metadata once it becomes unnecessary. */ @Transactional(POOL_TX) @RetrySemantics.SafeToRetry void performWriteSetGC() throws MetaException; /** * Determine if there are enough consecutive failures compacting a table or partition that no * new automatic compactions should be scheduled. User initiated compactions do not do this * check. * @param ci Table or partition to check. * @return true if it is ok to compact, false if there have been too many failures. 
* @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly boolean checkFailedCompactions(CompactionInfo ci) throws MetaException; @VisibleForTesting @Transactional(POOL_TX) int getNumLocks() throws SQLException, MetaException; @VisibleForTesting long setTimeout(long milliseconds); @VisibleForTesting long getOpenTxnTimeOutMillis(); @VisibleForTesting void setOpenTxnTimeOutMillis(long openTxnTimeOutMillis); @RetrySemantics.Idempotent MutexAPI getMutexAPI(); /** * This is primarily designed to provide coarse-grained mutex support to operations running * inside the Metastore (of which there could be several instances). The initial goal is to * ensure that various sub-processes of the Compactor don't step on each other. * * In RDMBS world each {@code LockHandle} uses a java.sql.Connection so use it sparingly. */ interface MutexAPI { /** * The {@code key} is name of the lock. Will acquire an exclusive lock or block. It returns * a handle which must be used to release the lock. Each invocation returns a new handle. */ @SqlRetry(lockInternally = true) LockHandle acquireLock(String key) throws MetaException; /** * Same as {@link #acquireLock(String)} but takes an already existing handle as input. This * will associate the lock on {@code key} with the same handle. All locks associated with * the same handle will be released together. * @param handle not NULL */ void acquireLock(String key, LockHandle handle) throws MetaException; interface LockHandle extends AutoCloseable { /** * Releases all locks associated with this handle. */ void releaseLocks(); /** * Returns the value of the last update time persisted during the appropriate lock release call. */ Long getLastUpdateTime(); /** * Releases all locks associated with this handle, and persist the value of the last update time. 
*/ void releaseLocks(Long timestamp); } } /** * Once a {@link java.util.concurrent.ThreadPoolExecutor} Worker submits a job to the cluster, * it calls this to update the metadata. * @param id {@link CompactionInfo#id} */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent void setHadoopJobId(String hadoopJobId, long id) throws MetaException; /** * Add the ACID write event information to writeNotificationLog table. * @param acidWriteEvent */ @SqlRetry(lockInternally = true, retryOnDuplicateKey = true) @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent void addWriteNotificationLog(ListenerEvent acidWriteEvent) throws MetaException; /** * Return the currently seen minimum open transaction ID. * @return minimum transaction ID * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent long findMinOpenTxnIdForCleaner() throws MetaException; /** * Returns the compaction running in the transaction txnId * @param txnId transaction Id * @return compaction info * @throws MetaException ex */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly Optional<CompactionInfo> getCompactionByTxnId(long txnId) throws MetaException; /** * Returns the smallest txnid that could be seen in open state across all active transactions in * the system or -1 if there are no active transactions. * @return transaction ID * @deprecated remove when min_history_level table is dropped */ @RetrySemantics.ReadOnly @SqlRetry @Transactional(POOL_COMPACTOR) @Deprecated long findMinTxnIdSeenOpen() throws MetaException; /** * Returns ACID metadata related metrics info. * @return metrics info object */ @SqlRetry @Transactional(POOL_TX) @RetrySemantics.ReadOnly MetricsInfo getMetricsInfo() throws MetaException; /** * Returns ACID metrics related info for a specific resource and metric type. If no record is found matching the * filter criteria, null will be returned. 
* @param dbName name of database, non-null * @param tblName name of the table, non-null * @param partitionName name of the partition, can be null * @param type type of the delta metric, non-null * @return instance of delta metrics info, can be null * @throws MetaException */ @RetrySemantics.ReadOnly @SqlRetry @Transactional(POOL_COMPACTOR) CompactionMetricsData getCompactionMetricsData(String dbName, String tblName, String partitionName, CompactionMetricsData.MetricType type) throws MetaException; /** * Remove records from the compaction metrics cache matching the filter criteria passed in as parameters * @param dbName name of the database, non-null * @param tblName name of the table, non-null * @param partitionName name of the partition, non-null * @param type type of the delta metric, non-null * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.SafeToRetry void removeCompactionMetricsData(String dbName, String tblName, String partitionName, CompactionMetricsData.MetricType type) throws MetaException; /** * Returns the top ACID metrics from each type {@link CompactionMetricsData.MetricType} * @param limit number of returned records for each type * @return list of metrics, always non-null * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.ReadOnly List<CompactionMetricsData> getTopCompactionMetricsDataPerType(int limit) throws MetaException; /** * Create, update or delete one record in the compaction metrics cache. * <p> * If the metric is not found in the metrics cache, it will be created. * </p> * <p> * If the metric is found, it will be updated. This operation uses an optimistic locking mechanism, meaning if another * operation changed the value of this metric, the update will abort and won't be retried. * </p> * <p> * If the new metric value is below {@link CompactionMetricsData#getThreshold()}, it will be deleted. 
* </p> * @param data the object that is used for the operation * @return true, if update finished successfully * @throws MetaException */ @SqlRetry @Transactional(POOL_COMPACTOR) @RetrySemantics.Idempotent boolean updateCompactionMetricsData(CompactionMetricsData data) throws MetaException; }
google/gdata-java-client
37,201
java/src/com/google/gdata/client/CoreErrorDomain.java
/* Copyright (c) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gdata.client; import com.google.gdata.util.ErrorDomain; /** This is the error domain class for the Core Error Domain, representing errors thrown by the core server and Java client library. */ public class CoreErrorDomain extends ErrorDomain { private CoreErrorDomain() { super("GData"); } public static final CoreErrorDomain ERR = new CoreErrorDomain(); public final ErrorCode versionNotSupported = new ErrorCode("versionNotSupported") .withInternalReason("Version is not supported"); public final ErrorCode accountDeleted = new ErrorCode("accountDeleted") .withInternalReason("Account deleted"); public final ErrorCode accountDisabled = new ErrorCode("accountDisabled") .withInternalReason("Account disabled"); public final ErrorCode accountUnverified = new ErrorCode("accountUnverified") .withInternalReason("Account unverified"); public final ErrorCode atomFormatRequired = new ErrorCode("atomFormatRequired") .withInternalReason("Batch requires Atom format"); public final ErrorCode batchingNotSupported = new ErrorCode("batchingNotSupported") .withInternalReason("Batching not supported by feed"); public final ErrorCode cantAccessFeedData = new ErrorCode("cantAccessFeedData") .withInternalReason("Unable to access feed data"); public final ErrorCode cantCreateContentGenerator = new ErrorCode("cantCreateContentGenerator") .withInternalReason("Unable to create ContentGenerator instance"); 
public final ErrorCode cantCreateEntry = new ErrorCode("cantCreateEntry") .withInternalReason("Unable to create entry"); public final ErrorCode cantCreateExtension = new ErrorCode("cantCreateExtension") .withInternalReason("Unable to create extension"); public final ErrorCode cantCreateFeed = new ErrorCode("cantCreateFeed") .withInternalReason("Unable to create feed"); public final ErrorCode cantCreateProvider = new ErrorCode("cantCreateProvider") .withInternalReason("Unable to instantiate provider"); public final ErrorCode cantDecodeCategoryQuery = new ErrorCode("cantDecodeCategoryQuery") .withInternalReason("Unable to decode category query"); public final ErrorCode cannotEditResource = new ErrorCode("cannotEditResource") .withInternalReason("Target resource cannot be edited by client"); public final ErrorCode cantEncodeQueryParams = new ErrorCode("cantEncodeQueryParams") .withInternalReason("Unable to encode query parameters"); public final ErrorCode cantExtractJsonValue = new ErrorCode("cantExtractJsonValue") .withInternalReason("Cannot extract JSON value"); public final ErrorCode cantLoadAuthProviderClass = new ErrorCode("cantLoadAuthProviderClass") .withInternalReason("authProvider class cannot be loaded"); public final ErrorCode cantLoadEntryClass = new ErrorCode("cantLoadEntryClass") .withInternalReason("entry class cannot be loaded"); public final ErrorCode cantLoadExtensionClass = new ErrorCode("cantLoadExtensionClass") .withInternalReason( "Extension classes must implement the Extension interface"); public final ErrorCode cantLoadExtensionPoint = new ErrorCode("cantLoadExtensionPoint") .withInternalReason( "Unable to load ExtensionPoint class"); public final ErrorCode cantLoadFeedClass = new ErrorCode("cantLoadFeedClass") .withInternalReason("feed class cannot be loaded"); public final ErrorCode cantLoadFeedProviderClass = new ErrorCode("cantLoadFeedProviderClass") .withInternalReason("feedProvider class cannot be loaded"); public final ErrorCode 
cantLoadGeneratorClass = new ErrorCode("cantLoadGeneratorClass") .withInternalReason("Unable to load ContentGenerator class"); public final ErrorCode cantLoadKindAdaptor = new ErrorCode("cantLoadKindAdaptor") .withInternalReason("Unable to load kind adaptor"); public final ErrorCode cantLoadServiceClass = new ErrorCode("cantLoadServiceClass") .withInternalReason("Unable to load serviceClass class"); public final ErrorCode cantWriteMimeMultipart = new ErrorCode("cantWriteMimeMultipart") .withInternalReason("Unable to write MIME multipart message"); public final ErrorCode clientNotWhitelisted = new ErrorCode("clientNotWhitelisted") .withInternalReason("Client not whitelisted for ONLINE access"); public final ErrorCode collectionTitleRequired = new ErrorCode("collectionTitleRequired") .withInternalReason("Collection must contain a title"); public final ErrorCode commentsFeedLinkRequired = new ErrorCode("commentsFeedLinkRequired") .withInternalReason("g:comments/g:feedLink is required"); public final ErrorCode deleteNotSupported = new ErrorCode("deleteNotSupported") .withInternalReason("Delete not supported by feed"); public final ErrorCode duplicateAlt = new ErrorCode("duplicateAlt") .withInternalReason("Duplicate alt mapping"); public final ErrorCode duplicateAttribute = new ErrorCode("duplicateAttribute") .withInternalReason("Duplicate attribute"); public final ErrorCode duplicateAttributeValue = new ErrorCode("duplicateAttributeValue") .withInternalReason("Duplicate attribute value"); public final ErrorCode duplicateContent = new ErrorCode("duplicateContent") .withInternalReason("Duplicate content"); public final ErrorCode duplicateDraft = new ErrorCode("duplicateDraft") .withInternalReason("Duplicate draft"); public final ErrorCode duplicateEmail = new ErrorCode("duplicateEmail") .withInternalReason("Duplicate email"); public final ErrorCode duplicateEntryId = new ErrorCode("duplicateEntryId") .withInternalReason("Duplicate entry ID"); public final ErrorCode 
duplicateExtension = new ErrorCode("duplicateExtension") .withInternalReason("Duplicate extension element"); public final ErrorCode duplicateFeedId = new ErrorCode("duplicateFeedId") .withInternalReason("Duplicate feed ID"); public final ErrorCode duplicateGenerator = new ErrorCode("duplicateGenerator") .withInternalReason("Duplicate generator"); public final ErrorCode duplicateIcon = new ErrorCode("duplicateIcon") .withInternalReason("Duplicate icon"); public final ErrorCode duplicateItemsPerPage = new ErrorCode("duplicateItemsPerPage") .withInternalReason("Duplicate itemsPerPage"); public final ErrorCode duplicateLogo = new ErrorCode("duplicateLogo") .withInternalReason("Duplicate logo"); public final ErrorCode duplicateName = new ErrorCode("duplicateName") .withInternalReason("Duplicate name"); public final ErrorCode duplicatePathPrefix = new ErrorCode("duplicatePathPrefix") .withInternalReason("Duplicate pathPrefix element"); public final ErrorCode duplicateRights = new ErrorCode("duplicateRights") .withInternalReason("Duplicate rights"); public final ErrorCode duplicateStartIndex = new ErrorCode("duplicateStartIndex") .withInternalReason("Duplicate startIndex"); public final ErrorCode duplicateSubtitle = new ErrorCode("duplicateSubtitle") .withInternalReason("Duplicate subtitle"); public final ErrorCode duplicateSummary = new ErrorCode("duplicateSummary") .withInternalReason("Duplicate summary"); public final ErrorCode duplicateTextNodeValue = new ErrorCode("duplicateTextNodeValue") .withInternalReason("Duplicate text node value"); public final ErrorCode duplicateTitle = new ErrorCode("duplicateTitle") .withInternalReason("Duplicate title"); public final ErrorCode duplicateTotalResults = new ErrorCode("duplicateTotalResults") .withInternalReason("Duplicate totalResults"); public final ErrorCode duplicateUri = new ErrorCode("duplicateUri") .withInternalReason("Duplicate URI"); public final ErrorCode duplicateUrlBase = new ErrorCode("duplicateUrlBase") 
.withInternalReason("Duplicate urlBase element"); public final ErrorCode duplicateValue = new ErrorCode("duplicateValue") .withInternalReason("Duplicate value"); public final ErrorCode elementNotRepeatable = new ErrorCode("elementNotRepeatable") .withInternalReason("Element is not repeatable"); public final ErrorCode elementNotSimple = new ErrorCode("elementNotSimple") .withInternalReason("Element is not simple"); public final ErrorCode emailValueRequired = new ErrorCode("emailValueRequired") .withInternalReason("email must have a value"); public final ErrorCode entityTagMatches = new ErrorCode("entityTagMatches") .withInternalReason("At least one entity tag matches"); public final ErrorCode entryNotAssociated = new ErrorCode("entryNotAssociated") .withInternalReason("Entry is not associated with a GData service"); public final ErrorCode etagsMismatch = new ErrorCode("etagsMismatch") .withInternalReason("Etags mismatch"); public final ErrorCode etagsUnsupported = new ErrorCode("etagsUnsupported") .withInternalReason("Resource does not support Etags"); public final ErrorCode feedNotAssociated = new ErrorCode("feedNotAssociated") .withInternalReason("Feed is not associated with a GData service"); public final ErrorCode geoPtLatRequired = new ErrorCode("geoPtLatRequired") .withInternalReason("g:geoPt/@lat is required"); public final ErrorCode geoPtLonRequired = new ErrorCode("geoPtLonRequired") .withInternalReason("g:geoPt/@lon is required"); public final ErrorCode headerRequired = new ErrorCode("headerRequired") .withInternalReason("Header required"); public final ErrorCode iconValueRequired = new ErrorCode("iconValueRequired") .withInternalReason("icon must have a value"); public final ErrorCode idRequired = new ErrorCode("idRequired") .withInternalReason("g:originalEvent/@id is required"); public final ErrorCode idValueRequired = new ErrorCode("idValueRequired") .withInternalReason("ID must have a value"); public final ErrorCode illegalInputFormat = new 
ErrorCode("illegalInputFormat") .withInternalReason( "Input format is not compatible with selected alt output format"); public final ErrorCode imsNotSupported = new ErrorCode("imsNotSupported") .withInternalReason( "If-Modified-Since HTTP precondition not supported on POST"); public final ErrorCode incompatiblePaginationParameters = new ErrorCode("incompatiblePaginationParameters") .withInternalReason("start-token and start-index cannot both " + "be specified at the same time"); public final ErrorCode incorrectDataVersion = new ErrorCode("incorrectDataVersion"); public final ErrorCode insertNotSupported = new ErrorCode("insertNotSupported") .withInternalReason("Insert not supported by feed"); public final ErrorCode insufficientSecurityLevel = new ErrorCode("insufficientSecurityLevel") .withInternalReason("Insufficient security level"); public final ErrorCode invalidAltValue = new ErrorCode("invalidAltValue") .withInternalReason("Invalid alt value for entry"); public final ErrorCode invalidArbitraryXml = new ErrorCode("invalidArbitraryXml") .withInternalReason("Invalid value for arbitrary XML"); public final ErrorCode invalidAttributeValue = new ErrorCode("invalidAttributeValue") .withInternalReason("Invalid value for attribute"); public final ErrorCode invalidAverageRatingAttribute = new ErrorCode("invalidAverageRatingAttribute") .withInternalReason( "gd:rating/@average should lie in between " + "gd:rating/@min and gd:rating/@max"); public final ErrorCode invalidBase64 = new ErrorCode("invalidBase64") .withInternalReason("Expected Base-64 content"); public final ErrorCode invalidBatchOperationType = new ErrorCode("invalidBatchOperationType") .withInternalReason("Invalid type for batch:operation"); public final ErrorCode invalidBigDecimalAttribute = new ErrorCode("invalidBigDecimalAttribute") .withInternalReason("Invalid value for big decimal attribute"); public final ErrorCode invalidBigIntegerAttribute = new ErrorCode("invalidBigIntegerAttribute") 
.withInternalReason("Invalid value for big integer attribute"); public final ErrorCode invalidBooleanAttribute = new ErrorCode("invalidBooleanAttribute") .withInternalReason("Invalid value for boolean attribute"); public final ErrorCode invalidByteAttribute = new ErrorCode("invalidByteAttribute") .withInternalReason("Invalid value for byte attribute"); public final ErrorCode invalidCacheControlOption = new ErrorCode("invalidCacheControlOption") .withInternalReason("Invalid option in Cache-Control header"); public final ErrorCode invalidCategoryFilter = new ErrorCode("invalidCategoryFilter") .withInternalReason("Invalid category filter"); public final ErrorCode invalidChildElement = new ErrorCode("invalidChildElement") .withInternalReason("Child elements are not allowed."); public final ErrorCode invalidContentType = new ErrorCode("invalidContentType") .withInternalReason("Malformed Content-Type"); public final ErrorCode invalidCountHintAttribute = new ErrorCode("invalidCountHintAttribute") .withInternalReason("Invalid gd:feedLink/@countHint"); public final ErrorCode invalidDatetime = new ErrorCode("invalidDatetime") .withInternalReason("Badly formatted datetime"); public final ErrorCode invalidDoubleAttribute = new ErrorCode("invalidDoubleAttribute") .withInternalReason("Invalid value for double attribute"); public final ErrorCode invalidDraft = new ErrorCode("invalidDraft") .withInternalReason("Invalid value for draft"); public final ErrorCode invalidEndValue = new ErrorCode("invalidEndValue") .withInternalReason("Invalid end value"); public final ErrorCode invalidEnumValue = new ErrorCode("invalidEnumValue") .withInternalReason("Invalid enum value"); public final ErrorCode invalidErrorFormat = new ErrorCode("invalidErrorFormat") .withInternalReason("Invalid error format"); public final ErrorCode invalidExtension = new ErrorCode("invalidExtension") .withInternalReason("Invalid extension element"); public final ErrorCode invalidFieldSelection = new 
ErrorCode("invalidFieldSelection") .withInternalReason("Invalid field selection"); public final ErrorCode invalidFixedAttribute = new ErrorCode("invalidFixedAttribute") .withInternalReason("Invalid value for fixed attribute"); public final ErrorCode invalidFloatAttribute = new ErrorCode("invalidFloatAttribute") .withInternalReason("Invalid value for float attribute"); public final ErrorCode invalidGeoPtElev = new ErrorCode("invalidGeoPtElev") .withInternalReason("Invalid geoPt/@elev"); public final ErrorCode invalidGeoPtLat = new ErrorCode("invalidGeoPtLat") .withInternalReason("Invalid geoPt/@lat"); public final ErrorCode invalidGeoPtLon = new ErrorCode("invalidGeoPtLon") .withInternalReason("Invalid geoPt/@lon"); public final ErrorCode invalidGeoPtTime = new ErrorCode("invalidGeoPtTime") .withInternalReason("Date/time value expected"); public final ErrorCode invalidIntegerAttribute = new ErrorCode("invalidIntegerAttribute") .withInternalReason("Invalid value for integer attribute"); public final ErrorCode invalidJson = new ErrorCode("invalidJson") .withInternalReason("Invalid JSON"); public final ErrorCode invalidLongAttribute = new ErrorCode("invalidLongAttribute") .withInternalReason("Invalid value for long attribute"); public final ErrorCode invalidMediaSourceUri = new ErrorCode("invalidMediaSourceUri") .withInternalReason("Invalid media source URI"); public final ErrorCode invalidMediaType = new ErrorCode("invalidMediaType") .withInternalReason("Not a valid media type"); public final ErrorCode invalidMethodOverrideHeader = new ErrorCode("invalidMethodOverrideHeader") .withInternalReason("Invalid method override header"); public final ErrorCode invalidMimeType = new ErrorCode("invalidMimeType") .withInternalReason("Malformed MIME type"); public final ErrorCode invalidMixedContent = new ErrorCode("invalidMixedContent") .withInternalReason("Invalid value for mixed content"); public final ErrorCode invalidParameterValue = new ErrorCode("invalidParameterValue") 
.withInternalReason("Invalid parameter value"); public final ErrorCode invalidRedirectedToUrl = new ErrorCode("invalidRedirectedToUrl") .withInternalReason("Invalid redirected-to URL"); public final ErrorCode invalidPatchTarget = new ErrorCode("invalidPatchTarget") .withInternalReason("Target resource cannot be patched"); public final ErrorCode invalidReminderAbsoluteTime = new ErrorCode("invalidReminderAbsoluteTime") .withInternalReason("Invalid g:reminder/@absoluteTime"); public final ErrorCode invalidReminderDays = new ErrorCode("invalidReminderDays") .withInternalReason("Invalid g:reminder/@days"); public final ErrorCode invalidReminderHours = new ErrorCode("invalidReminderHours") .withInternalReason("Invalid g:reminder/@hours"); public final ErrorCode invalidReminderMethod = new ErrorCode("invalidReminderMethod") .withInternalReason("Invalid g:reminder/@method"); public final ErrorCode invalidReminderMinutes = new ErrorCode("invalidReminderMinutes") .withInternalReason("Invalid g:reminder/@minutes"); public final ErrorCode invalidRequestUri = new ErrorCode("invalidRequestUri") .withInternalReason("Invalid request URI"); public final ErrorCode invalidRequestVersion = new ErrorCode("invalidRequestVersion") .withInternalReason("Invalid request version"); public final ErrorCode invalidResourceVersion = new ErrorCode("invalidResourceVersion") .withInternalReason("Unexpected resource version ID"); public final ErrorCode invalidSecurityProtocol = new ErrorCode("invalidSecurityProtocol") .withInternalReason("Invalid security protocol"); public final ErrorCode invalidServiceClass = new ErrorCode("invalidServiceClass") .withInternalReason("Invalid service class attribute"); public final ErrorCode invalidShortAttribute = new ErrorCode("invalidShortAttribute") .withInternalReason("Invalid value for short attribute"); public final ErrorCode invalidStartValue = new ErrorCode("invalidStartValue") .withInternalReason("Invalid start value"); public final ErrorCode 
invalidTextContent = new ErrorCode("invalidTextContent") .withInternalReason("Invalid text content"); public final ErrorCode invalidTextContentType = new ErrorCode("invalidTextContentType") .withInternalReason("Invalid text content type"); public final ErrorCode invalidTimeOffset = new ErrorCode("invalidTimeOffset") .withInternalReason("Invalid time offset"); public final ErrorCode invalidToDoDueTime = new ErrorCode("invalidToDoDueTime") .withInternalReason("Invalid g:toDo/@dueTime"); public final ErrorCode invalidToDoHours = new ErrorCode("invalidToDoHours") .withInternalReason("Invalid g:toDo/@hours"); public final ErrorCode invalidUri = new ErrorCode("invalidUri") .withInternalReason("Badly formatted URI"); public final ErrorCode invalidUriTemplate = new ErrorCode("invalidUriTemplate") .withInternalReason("Invalid uriTemplate"); public final ErrorCode invalidUrl = new ErrorCode("invalidUrl") .withInternalReason("Badly formatted URL"); public final ErrorCode invalidValueRatingAttribute = new ErrorCode("invalidValueRatingAttribute") .withInternalReason( "gd:rating/@value should lie in between " + "gd:rating/@min and gd:rating/@max"); public final ErrorCode invalidVersion = new ErrorCode("invalidVersion") .withInternalReason("Invalid version"); public final ErrorCode itemsPerPageNotInteger = new ErrorCode("itemsPerPageNotInteger") .withInternalReason("itemsPerPage is not an integer"); public final ErrorCode lengthNotInteger = new ErrorCode("lengthNotInteger") .withInternalReason("Length must be an integer"); public final ErrorCode logoValueRequired = new ErrorCode("logoValueRequired") .withInternalReason("logo must have a value"); public final ErrorCode matchHeaderRequired = new ErrorCode("matchHeaderRequired") .withInternalReason("If-Match or If-None-Match header required"); public final ErrorCode minGreaterThanMax = new ErrorCode("minGreaterThanMax") .withInternalReason( "'updatedMin' must be less than or equal to 'updatedMax'"); public final ErrorCode 
missingAddressAttribute = new ErrorCode("missingAddressAttribute") .withInternalReason("g:email/@address is required"); public final ErrorCode missingAltAttribute = new ErrorCode("missingAltAttribute") .withInternalReason("Missing alt attribute"); public final ErrorCode missingAttribute = new ErrorCode("missingAttribute") .withInternalReason("Missing attribute"); public final ErrorCode missingContact = new ErrorCode("missingContact") .withInternalReason("missing contact"); public final ErrorCode missingContentType = new ErrorCode("missingContentType") .withInternalReason("Response contains no content type"); public final ErrorCode missingContentTypeAttribute = new ErrorCode("missingContentTypeAttribute") .withInternalReason("Missing content type attribute"); public final ErrorCode missingConverter = new ErrorCode("missingConverter") .withInternalReason("No converter for type"); public final ErrorCode missingDescription = new ErrorCode("missingDescription") .withInternalReason("missing description"); public final ErrorCode missingEntry = new ErrorCode("missingEntry") .withInternalReason("Entry not found"); public final ErrorCode missingExtensionClass = new ErrorCode("missingExtensionClass") .withInternalReason("Missing extensionClass attribute"); public final ErrorCode missingExtensionElement = new ErrorCode("missingExtensionElement") .withInternalReason("Required extension element"); public final ErrorCode missingFeed = new ErrorCode("missingFeed") .withInternalReason("Feed not found"); public final ErrorCode missingFeedProvider = new ErrorCode("missingFeedProvider") .withInternalReason("No FeedProvider instance"); public final ErrorCode missingFeedProviderClass = new ErrorCode("missingFeedProviderClass") .withInternalReason("Missing feedProviderClass attribute"); public final ErrorCode missingFeedProviderDescription = new ErrorCode("missingFeedProviderDescription") .withInternalReason( "At least one FeedProviderDescription must be specified"); public final 
ErrorCode missingFormat = new ErrorCode("missingFormat") .withInternalReason("missing format"); public final ErrorCode missingGeneratorClass = new ErrorCode("missingGeneratorClass") .withInternalReason("Missing generatorClass attribute"); public final ErrorCode missingHrefAttribute = new ErrorCode("missingHrefAttribute") .withInternalReason("Link must have an 'href' attribute"); public final ErrorCode missingLocalName = new ErrorCode("missingLocalName") .withInternalReason("Missing localName"); public final ErrorCode missingNameAttribute = new ErrorCode("missingNameAttribute") .withInternalReason("Missing name attribute for customParam"); public final ErrorCode missingNamespace = new ErrorCode("missingNamespace") .withInternalReason("Missing namespace"); public final ErrorCode missingNamespaceDescription = new ErrorCode("missingNamespaceDescription") .withInternalReason("No matching NamespaceDescription"); public final ErrorCode missingPathPrefix = new ErrorCode("missingPathPrefix") .withInternalReason("pathPrefix is missing"); public final ErrorCode missingPatternAttribute = new ErrorCode("missingPatternAttribute") .withInternalReason("Missing pattern attribute for customParam"); public final ErrorCode missingProviderConstructor = new ErrorCode("missingProviderConstructor") .withInternalReason("Provider constructor not found"); public final ErrorCode missingRequiredContent = new ErrorCode("missingRequiredContent") .withInternalReason("Missing required text content"); public final ErrorCode missingResourceVersion = new ErrorCode("missingResourceVersion") .withInternalReason("Missing resource version ID"); public final ErrorCode missingServiceClass = new ErrorCode("missingServiceClass") .withInternalReason("Missing serviceClass attribute"); public final ErrorCode missingShortName = new ErrorCode("missingShortName") .withInternalReason("missing shortName"); public final ErrorCode missingSrcAttribute = new ErrorCode("missingSrcAttribute") .withInternalReason("Missing 
src attribute"); public final ErrorCode missingTags = new ErrorCode("missingTags") .withInternalReason("missing tags"); public final ErrorCode missingTermAttribute = new ErrorCode("missingTermAttribute") .withInternalReason("Category must have a 'term' attribute"); public final ErrorCode missingTextContent = new ErrorCode("missingTextContent") .withInternalReason("Text content is required for this element."); public final ErrorCode missingUriTemplate = new ErrorCode("missingUriTemplate") .withInternalReason("Missing uriTemplate"); public final ErrorCode missingVersion = new ErrorCode("missingVersion") .withInternalReason("Missing version attribute"); public final ErrorCode mustBeBoolean = new ErrorCode("mustBeBoolean") .withInternalReason("Attribute must be boolean"); public final ErrorCode mustExtendBaseEntry = new ErrorCode("mustExtendBaseEntry") .withInternalReason("entry class must derive from BaseEntry"); public final ErrorCode mustExtendBaseFeed = new ErrorCode("mustExtendBaseFeed") .withInternalReason("feed class must derive from BaseFeed"); public final ErrorCode mustExtendExtensionPoint = new ErrorCode("mustExtendExtensionPoint") .withInternalReason( "Extended classes must extend ExtensionPoint"); public final ErrorCode mustImplementExtension = new ErrorCode("mustImplementExtension") .withInternalReason( "Extension classes must implement the Extension interface"); public final ErrorCode nameRequired = new ErrorCode("nameRequired") .withInternalReason("g:extendedProperty/@name is required"); public final ErrorCode nameValueRequired = new ErrorCode("nameValueRequired") .withInternalReason("name must have a value"); public final ErrorCode noAcceptableType = new ErrorCode("noAcceptableType") .withInternalReason("No acceptable type available"); public final ErrorCode noAcceptableLanguage = new ErrorCode("noAcceptableLanguage") .withInternalReason("No acceptable language available"); public final ErrorCode noAvailableServers = new ErrorCode("noAvailableServers") 
.withInternalReason("Cannot find any servers"); public final ErrorCode noPostConcurrency = new ErrorCode("noPostConcurrency") .withInternalReason("POST method does not support concurrency"); public final ErrorCode notModifiedSinceTimestamp = new ErrorCode("notModifiedSinceTimestamp") .withInternalReason("Entity not modified since specified time"); public final ErrorCode nullJsonValue = new ErrorCode("nullJsonValue") .withInternalReason("Null JSON values not supported"); public final ErrorCode optionsNotSupported = new ErrorCode("optionsNotSupported") .withInternalReason("OPTIONS is not supported"); public final ErrorCode optimisticConcurrencyNotSupported = new ErrorCode("optimisticConcurrencyNotSupported") .withInternalReason("Optimistic concurrency is no longer supported"); public final ErrorCode partialJsoncUnsupported = new ErrorCode("partialJsonUnsupported") .withInternalReason("Partial operations are not suppported with JSONC"); public final ErrorCode pathPrefixValueRequired = new ErrorCode("pathPrefixValueRequired") .withInternalReason("pathPrefix element must have a value"); public final ErrorCode predicatesNotAllowed = new ErrorCode("predicatesNotAllowed") .withInternalReason( "Cannot specify any predicates with requested content type"); public final ErrorCode quotaExceeded = new ErrorCode("quotaExceeded") .withInternalReason("Insufficient storage quota"); public final ErrorCode rateLimitExceeded = new ErrorCode("rateLimitExceeded") .withInternalReason("Rate limit exceeded, lower query rate"); public final ErrorCode responseMissingContentType = new ErrorCode("responseMissingContentType") .withInternalReason("Response contains no content type"); public final ErrorCode responseMissingEntry = new ErrorCode("responseMissingEntry") .withInternalReason("Response contains no entry"); public final ErrorCode responseMissingFeed = new ErrorCode("responseMissingFeed") .withInternalReason("Response contains no feed"); public final ErrorCode rpcUnsupported = new 
ErrorCode("rpcUnsupported") .withInternalReason("RPC authentication not enabled"); public final ErrorCode serverOverloaded = new ErrorCode("serverOverloaded") .withInternalReason("Servers are overloaded"); public final ErrorCode startIndexNotInteger = new ErrorCode("startIndexNotInteger") .withInternalReason("startIndex is not an integer"); public final ErrorCode targetFeedReadOnly = new ErrorCode("targetFeedReadOnly") .withInternalReason("Target feed is read-only"); public final ErrorCode textNotAllowed = new ErrorCode("textNotAllowed") .withInternalReason("This element must not have any text() data"); public final ErrorCode timestampAndEntityTagMatch = new ErrorCode("timestampAndEntityTagMatch") .withInternalReason("Timestamp and entity tag match"); public final ErrorCode toDoCompletedRequired = new ErrorCode("toDoCompletedRequired") .withInternalReason("g:toDo/@completed is required"); public final ErrorCode tooManyAttributes = new ErrorCode("tooManyAttributes") .withInternalReason("g:reminder must have zero or one attribute"); public final ErrorCode totalResultsNotInteger = new ErrorCode("totalResultsNotInteger") .withInternalReason("totalResults is not an integer"); public final ErrorCode traceNotSupported = new ErrorCode("traceNotSupported") .withInternalReason("TRACE is not supported"); public final ErrorCode unknownMdbService = new ErrorCode("unknownMdbService") .withInternalReason("Unknown MDB service"); public final ErrorCode unparsableS2SHeader = new ErrorCode("unparsableS2SHeader") .withInternalReason("Error parsing S2S auth header"); public final ErrorCode unrecognizedElement = new ErrorCode("unrecognizedElement") .withInternalReason("Unrecognized element"); public final ErrorCode unrecognizedKey = new ErrorCode("unrecognizedKey") .withInternalReason("Unrecognized key"); public final ErrorCode unrecognizedParserEvent = new ErrorCode("unrecognizedParserEvent") .withInternalReason("Unrecognized parser event"); public final ErrorCode 
unsupportedContentType = new ErrorCode("unsupportedContentType") .withInternalReason("Unsupported content type"); public final ErrorCode unsupportedEncoding = new ErrorCode("unsupportedEncoding") .withInternalReason("Unsupported encoding"); public final ErrorCode unsupportedFieldsParam = new ErrorCode("unsupportedFieldsParam") .withInternalReason("Fields query parameter is not supported"); public final ErrorCode unsupportedHeader = new ErrorCode("unsupportedHeader") .withInternalReason("Header not supported"); public final ErrorCode unsupportedHeaderIfModifiedSince = new ErrorCode("unsupportedHeaderIfModifiedSince") .withInternalReason("If-Unmodified-Since header not supported"); public final ErrorCode unsupportedHeaderIfNoneMatch = new ErrorCode("unsupportedHeaderIfNoneMatch") .withInternalReason("If-None-Match: * is not supported"); public final ErrorCode unsupportedNullJson = new ErrorCode("unsupportedNullJson") .withInternalReason("Null JSON values not supported"); public final ErrorCode unsupportedOutputFormat = new ErrorCode("unsupportedOutputFormat") .withInternalReason("Unsupported output format"); public final ErrorCode unsupportedQueryParam = new ErrorCode("unsupportedQueryParam") .withInternalReason("Query parameter is not supported"); public final ErrorCode unsupportedQueryRequestType = new ErrorCode("unsupportedQueryRequestType") .withInternalReason("Unsupported query request type"); public final ErrorCode unsupportedQueryType = new ErrorCode("unsupportedQueryType") .withInternalReason("Unsupported query type"); public final ErrorCode unsupportedTextType = new ErrorCode("unsupportedTextType") .withInternalReason( "Unsupported type. 
Valid types are 'plain' and 'html'"); public final ErrorCode updateNotSupported = new ErrorCode("updateNotSupported") .withInternalReason("Update not supported by feed"); public final ErrorCode updateRequiresFullRepresentation = new ErrorCode("updateRequiresFullRepresentation") .withInternalReason("PUT requires a full resource representation. " + "Use PATCH to update using a partial representation"); public final ErrorCode uriValueRequired = new ErrorCode("uriValueRequired") .withInternalReason("URI must have a value"); public final ErrorCode urlBaseValueRequired = new ErrorCode("urlBaseValueRequired") .withInternalReason("urlBase element must have a value"); public final ErrorCode valueOrAverageRequired = new ErrorCode("valueOrAverageRequired") .withInternalReason( "at least one of g:rating/@value or gd:rating/@average is required"); public final ErrorCode valueOrXmlRequired = new ErrorCode("valueOrXmlRequired") .withInternalReason( "exactly one of g:extendedProperty/@value, XML is required"); public final ErrorCode valueXmlMutuallyExclusive = new ErrorCode("valueXmlMutuallyExclusive") .withInternalReason( "g:extendedProperty/@value and XML are mutually exclusive"); public final ErrorCode whenRequired = new ErrorCode("whenRequired") .withInternalReason("g:when inside g:originalEvent is required"); public final ErrorCode whitelistAccessFailed = new ErrorCode("whitelistAccessFailed") .withInternalReason("Failed to access whitelist"); public final ErrorCode workspaceRequired = new ErrorCode("workspaceRequired") .withInternalReason("Service must contain at least one workspace"); public final ErrorCode workspaceTitleRequired = new ErrorCode("workspaceTitleRequired") .withInternalReason("Workspace must contain a title"); public final ErrorCode uploadTooLarge = new ErrorCode("uploadTooLarge") .withInternalReason("The requested upload is too large"); }
openjdk/jdk8
38,082
jdk/src/share/classes/java/io/PrintWriter.java
/* * Copyright (c) 1996, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.io; import java.util.Objects; import java.util.Formatter; import java.util.Locale; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.UnsupportedCharsetException; /** * Prints formatted representations of objects to a text-output stream. This * class implements all of the <tt>print</tt> methods found in {@link * PrintStream}. It does not contain methods for writing raw bytes, for which * a program should use unencoded byte streams. * * <p> Unlike the {@link PrintStream} class, if automatic flushing is enabled * it will be done only when one of the <tt>println</tt>, <tt>printf</tt>, or * <tt>format</tt> methods is invoked, rather than whenever a newline character * happens to be output. 
These methods use the platform's own notion of line * separator rather than the newline character. * * <p> Methods in this class never throw I/O exceptions, although some of its * constructors may. The client may inquire as to whether any errors have * occurred by invoking {@link #checkError checkError()}. * * @author Frank Yellin * @author Mark Reinhold * @since JDK1.1 */ public class PrintWriter extends Writer { /** * The underlying character-output stream of this * <code>PrintWriter</code>. * * @since 1.2 */ protected Writer out; private final boolean autoFlush; private boolean trouble = false; private Formatter formatter; private PrintStream psOut = null; /** * Line separator string. This is the value of the line.separator * property at the moment that the stream was created. */ private final String lineSeparator; /** * Returns a charset object for the given charset name. * @throws NullPointerException is csn is null * @throws UnsupportedEncodingException if the charset is not supported */ private static Charset toCharset(String csn) throws UnsupportedEncodingException { Objects.requireNonNull(csn, "charsetName"); try { return Charset.forName(csn); } catch (IllegalCharsetNameException|UnsupportedCharsetException unused) { // UnsupportedEncodingException should be thrown throw new UnsupportedEncodingException(csn); } } /** * Creates a new PrintWriter, without automatic line flushing. * * @param out A character-output stream */ public PrintWriter (Writer out) { this(out, false); } /** * Creates a new PrintWriter. 
* * @param out A character-output stream * @param autoFlush A boolean; if true, the <tt>println</tt>, * <tt>printf</tt>, or <tt>format</tt> methods will * flush the output buffer */ public PrintWriter(Writer out, boolean autoFlush) { super(out); this.out = out; this.autoFlush = autoFlush; lineSeparator = java.security.AccessController.doPrivileged( new sun.security.action.GetPropertyAction("line.separator")); } /** * Creates a new PrintWriter, without automatic line flushing, from an * existing OutputStream. This convenience constructor creates the * necessary intermediate OutputStreamWriter, which will convert characters * into bytes using the default character encoding. * * @param out An output stream * * @see java.io.OutputStreamWriter#OutputStreamWriter(java.io.OutputStream) */ public PrintWriter(OutputStream out) { this(out, false); } /** * Creates a new PrintWriter from an existing OutputStream. This * convenience constructor creates the necessary intermediate * OutputStreamWriter, which will convert characters into bytes using the * default character encoding. * * @param out An output stream * @param autoFlush A boolean; if true, the <tt>println</tt>, * <tt>printf</tt>, or <tt>format</tt> methods will * flush the output buffer * * @see java.io.OutputStreamWriter#OutputStreamWriter(java.io.OutputStream) */ public PrintWriter(OutputStream out, boolean autoFlush) { this(new BufferedWriter(new OutputStreamWriter(out)), autoFlush); // save print stream for error propagation if (out instanceof java.io.PrintStream) { psOut = (PrintStream) out; } } /** * Creates a new PrintWriter, without automatic line flushing, with the * specified file name. This convenience constructor creates the necessary * intermediate {@link java.io.OutputStreamWriter OutputStreamWriter}, * which will encode characters using the {@linkplain * java.nio.charset.Charset#defaultCharset() default charset} for this * instance of the Java virtual machine. 
* * @param fileName * The name of the file to use as the destination of this writer. * If the file exists then it will be truncated to zero size; * otherwise, a new file will be created. The output will be * written to the file and is buffered. * * @throws FileNotFoundException * If the given string does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(fileName)} denies write * access to the file * * @since 1.5 */ public PrintWriter(String fileName) throws FileNotFoundException { this(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName))), false); } /* Private constructor */ private PrintWriter(Charset charset, File file) throws FileNotFoundException { this(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), charset)), false); } /** * Creates a new PrintWriter, without automatic line flushing, with the * specified file name and charset. This convenience constructor creates * the necessary intermediate {@link java.io.OutputStreamWriter * OutputStreamWriter}, which will encode characters using the provided * charset. * * @param fileName * The name of the file to use as the destination of this writer. * If the file exists then it will be truncated to zero size; * otherwise, a new file will be created. The output will be * written to the file and is buffered. 
* * @param csn * The name of a supported {@linkplain java.nio.charset.Charset * charset} * * @throws FileNotFoundException * If the given string does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(fileName)} denies write * access to the file * * @throws UnsupportedEncodingException * If the named charset is not supported * * @since 1.5 */ public PrintWriter(String fileName, String csn) throws FileNotFoundException, UnsupportedEncodingException { this(toCharset(csn), new File(fileName)); } /** * Creates a new PrintWriter, without automatic line flushing, with the * specified file. This convenience constructor creates the necessary * intermediate {@link java.io.OutputStreamWriter OutputStreamWriter}, * which will encode characters using the {@linkplain * java.nio.charset.Charset#defaultCharset() default charset} for this * instance of the Java virtual machine. * * @param file * The file to use as the destination of this writer. If the file * exists then it will be truncated to zero size; otherwise, a new * file will be created. The output will be written to the file * and is buffered. 
* * @throws FileNotFoundException * If the given file object does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(file.getPath())} * denies write access to the file * * @since 1.5 */ public PrintWriter(File file) throws FileNotFoundException { this(new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file))), false); } /** * Creates a new PrintWriter, without automatic line flushing, with the * specified file and charset. This convenience constructor creates the * necessary intermediate {@link java.io.OutputStreamWriter * OutputStreamWriter}, which will encode characters using the provided * charset. * * @param file * The file to use as the destination of this writer. If the file * exists then it will be truncated to zero size; otherwise, a new * file will be created. The output will be written to the file * and is buffered. 
* * @param csn * The name of a supported {@linkplain java.nio.charset.Charset * charset} * * @throws FileNotFoundException * If the given file object does not denote an existing, writable * regular file and a new regular file of that name cannot be * created, or if some other error occurs while opening or * creating the file * * @throws SecurityException * If a security manager is present and {@link * SecurityManager#checkWrite checkWrite(file.getPath())} * denies write access to the file * * @throws UnsupportedEncodingException * If the named charset is not supported * * @since 1.5 */ public PrintWriter(File file, String csn) throws FileNotFoundException, UnsupportedEncodingException { this(toCharset(csn), file); } /** Checks to make sure that the stream has not been closed */ private void ensureOpen() throws IOException { if (out == null) throw new IOException("Stream closed"); } /** * Flushes the stream. * @see #checkError() */ public void flush() { try { synchronized (lock) { ensureOpen(); out.flush(); } } catch (IOException x) { trouble = true; } } /** * Closes the stream and releases any system resources associated * with it. Closing a previously closed stream has no effect. * * @see #checkError() */ public void close() { try { synchronized (lock) { if (out == null) return; out.close(); out = null; } } catch (IOException x) { trouble = true; } } /** * Flushes the stream if it's not closed and checks its error state. * * @return <code>true</code> if the print stream has encountered an error, * either on the underlying output stream or during a format * conversion. */ public boolean checkError() { if (out != null) { flush(); } if (out instanceof java.io.PrintWriter) { PrintWriter pw = (PrintWriter) out; return pw.checkError(); } else if (psOut != null) { return psOut.checkError(); } return trouble; } /** * Indicates that an error has occurred. 
* * <p> This method will cause subsequent invocations of {@link * #checkError()} to return <tt>true</tt> until {@link * #clearError()} is invoked. */ protected void setError() { trouble = true; } /** * Clears the error state of this stream. * * <p> This method will cause subsequent invocations of {@link * #checkError()} to return <tt>false</tt> until another write * operation fails and invokes {@link #setError()}. * * @since 1.6 */ protected void clearError() { trouble = false; } /* * Exception-catching, synchronized output operations, * which also implement the write() methods of Writer */ /** * Writes a single character. * @param c int specifying a character to be written. */ public void write(int c) { try { synchronized (lock) { ensureOpen(); out.write(c); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /** * Writes A Portion of an array of characters. * @param buf Array of characters * @param off Offset from which to start writing characters * @param len Number of characters to write */ public void write(char buf[], int off, int len) { try { synchronized (lock) { ensureOpen(); out.write(buf, off, len); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /** * Writes an array of characters. This method cannot be inherited from the * Writer class because it must suppress I/O exceptions. * @param buf Array of characters to be written */ public void write(char buf[]) { write(buf, 0, buf.length); } /** * Writes a portion of a string. * @param s A String * @param off Offset from which to start writing characters * @param len Number of characters to write */ public void write(String s, int off, int len) { try { synchronized (lock) { ensureOpen(); out.write(s, off, len); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /** * Writes a string. 
This method cannot be inherited from the Writer class * because it must suppress I/O exceptions. * @param s String to be written */ public void write(String s) { write(s, 0, s.length()); } private void newLine() { try { synchronized (lock) { ensureOpen(); out.write(lineSeparator); if (autoFlush) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } } /* Methods that do not terminate lines */ /** * Prints a boolean value. The string produced by <code>{@link * java.lang.String#valueOf(boolean)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the <code>{@link * #write(int)}</code> method. * * @param b The <code>boolean</code> to be printed */ public void print(boolean b) { write(b ? "true" : "false"); } /** * Prints a character. The character is translated into one or more bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the <code>{@link * #write(int)}</code> method. * * @param c The <code>char</code> to be printed */ public void print(char c) { write(c); } /** * Prints an integer. The string produced by <code>{@link * java.lang.String#valueOf(int)}</code> is translated into bytes according * to the platform's default character encoding, and these bytes are * written in exactly the manner of the <code>{@link #write(int)}</code> * method. * * @param i The <code>int</code> to be printed * @see java.lang.Integer#toString(int) */ public void print(int i) { write(String.valueOf(i)); } /** * Prints a long integer. The string produced by <code>{@link * java.lang.String#valueOf(long)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the <code>{@link #write(int)}</code> * method. 
* * @param l The <code>long</code> to be printed * @see java.lang.Long#toString(long) */ public void print(long l) { write(String.valueOf(l)); } /** * Prints a floating-point number. The string produced by <code>{@link * java.lang.String#valueOf(float)}</code> is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the <code>{@link #write(int)}</code> * method. * * @param f The <code>float</code> to be printed * @see java.lang.Float#toString(float) */ public void print(float f) { write(String.valueOf(f)); } /** * Prints a double-precision floating-point number. The string produced by * <code>{@link java.lang.String#valueOf(double)}</code> is translated into * bytes according to the platform's default character encoding, and these * bytes are written in exactly the manner of the <code>{@link * #write(int)}</code> method. * * @param d The <code>double</code> to be printed * @see java.lang.Double#toString(double) */ public void print(double d) { write(String.valueOf(d)); } /** * Prints an array of characters. The characters are converted into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the <code>{@link #write(int)}</code> * method. * * @param s The array of chars to be printed * * @throws NullPointerException If <code>s</code> is <code>null</code> */ public void print(char s[]) { write(s); } /** * Prints a string. If the argument is <code>null</code> then the string * <code>"null"</code> is printed. Otherwise, the string's characters are * converted into bytes according to the platform's default character * encoding, and these bytes are written in exactly the manner of the * <code>{@link #write(int)}</code> method. * * @param s The <code>String</code> to be printed */ public void print(String s) { if (s == null) { s = "null"; } write(s); } /** * Prints an object. 
The string produced by the <code>{@link * java.lang.String#valueOf(Object)}</code> method is translated into bytes * according to the platform's default character encoding, and these bytes * are written in exactly the manner of the <code>{@link #write(int)}</code> * method. * * @param obj The <code>Object</code> to be printed * @see java.lang.Object#toString() */ public void print(Object obj) { write(String.valueOf(obj)); } /* Methods that do terminate lines */ /** * Terminates the current line by writing the line separator string. The * line separator string is defined by the system property * <code>line.separator</code>, and is not necessarily a single newline * character (<code>'\n'</code>). */ public void println() { newLine(); } /** * Prints a boolean value and then terminates the line. This method behaves * as though it invokes <code>{@link #print(boolean)}</code> and then * <code>{@link #println()}</code>. * * @param x the <code>boolean</code> value to be printed */ public void println(boolean x) { synchronized (lock) { print(x); println(); } } /** * Prints a character and then terminates the line. This method behaves as * though it invokes <code>{@link #print(char)}</code> and then <code>{@link * #println()}</code>. * * @param x the <code>char</code> value to be printed */ public void println(char x) { synchronized (lock) { print(x); println(); } } /** * Prints an integer and then terminates the line. This method behaves as * though it invokes <code>{@link #print(int)}</code> and then <code>{@link * #println()}</code>. * * @param x the <code>int</code> value to be printed */ public void println(int x) { synchronized (lock) { print(x); println(); } } /** * Prints a long integer and then terminates the line. This method behaves * as though it invokes <code>{@link #print(long)}</code> and then * <code>{@link #println()}</code>. 
* * @param x the <code>long</code> value to be printed */ public void println(long x) { synchronized (lock) { print(x); println(); } } /** * Prints a floating-point number and then terminates the line. This method * behaves as though it invokes <code>{@link #print(float)}</code> and then * <code>{@link #println()}</code>. * * @param x the <code>float</code> value to be printed */ public void println(float x) { synchronized (lock) { print(x); println(); } } /** * Prints a double-precision floating-point number and then terminates the * line. This method behaves as though it invokes <code>{@link * #print(double)}</code> and then <code>{@link #println()}</code>. * * @param x the <code>double</code> value to be printed */ public void println(double x) { synchronized (lock) { print(x); println(); } } /** * Prints an array of characters and then terminates the line. This method * behaves as though it invokes <code>{@link #print(char[])}</code> and then * <code>{@link #println()}</code>. * * @param x the array of <code>char</code> values to be printed */ public void println(char x[]) { synchronized (lock) { print(x); println(); } } /** * Prints a String and then terminates the line. This method behaves as * though it invokes <code>{@link #print(String)}</code> and then * <code>{@link #println()}</code>. * * @param x the <code>String</code> value to be printed */ public void println(String x) { synchronized (lock) { print(x); println(); } } /** * Prints an Object and then terminates the line. This method calls * at first String.valueOf(x) to get the printed object's string value, * then behaves as * though it invokes <code>{@link #print(String)}</code> and then * <code>{@link #println()}</code>. * * @param x The <code>Object</code> to be printed. 
*/ public void println(Object x) { String s = String.valueOf(x); synchronized (lock) { print(s); println(); } } /** * A convenience method to write a formatted string to this writer using * the specified format string and arguments. If automatic flushing is * enabled, calls to this method will flush the output buffer. * * <p> An invocation of this method of the form <tt>out.printf(format, * args)</tt> behaves in exactly the same way as the invocation * * <pre> * out.format(format, args) </pre> * * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a>. * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This writer * * @since 1.5 */ public PrintWriter printf(String format, Object ... args) { return format(format, args); } /** * A convenience method to write a formatted string to this writer using * the specified format string and arguments. If automatic flushing is * enabled, calls to this method will flush the output buffer. 
* * <p> An invocation of this method of the form <tt>out.printf(l, format, * args)</tt> behaves in exactly the same way as the invocation * * <pre> * out.format(l, format, args) </pre> * * @param l * The {@linkplain java.util.Locale locale} to apply during * formatting. If <tt>l</tt> is <tt>null</tt> then no localization * is applied. * * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a>. * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This writer * * @since 1.5 */ public PrintWriter printf(Locale l, String format, Object ... args) { return format(l, format, args); } /** * Writes a formatted string to this writer using the specified format * string and arguments. If automatic flushing is enabled, calls to this * method will flush the output buffer. * * <p> The locale always used is the one returned by {@link * java.util.Locale#getDefault() Locale.getDefault()}, regardless of any * previous invocations of other formatting methods on this object. 
* * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a>. * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * Formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This writer * * @since 1.5 */ public PrintWriter format(String format, Object ... args) { try { synchronized (lock) { ensureOpen(); if ((formatter == null) || (formatter.locale() != Locale.getDefault())) formatter = new Formatter(this); formatter.format(Locale.getDefault(), format, args); if (autoFlush) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } return this; } /** * Writes a formatted string to this writer using the specified format * string and arguments. If automatic flushing is enabled, calls to this * method will flush the output buffer. * * @param l * The {@linkplain java.util.Locale locale} to apply during * formatting. If <tt>l</tt> is <tt>null</tt> then no localization * is applied. 
* * @param format * A format string as described in <a * href="../util/Formatter.html#syntax">Format string syntax</a>. * * @param args * Arguments referenced by the format specifiers in the format * string. If there are more arguments than format specifiers, the * extra arguments are ignored. The number of arguments is * variable and may be zero. The maximum number of arguments is * limited by the maximum dimension of a Java array as defined by * <cite>The Java&trade; Virtual Machine Specification</cite>. * The behaviour on a * <tt>null</tt> argument depends on the <a * href="../util/Formatter.html#syntax">conversion</a>. * * @throws java.util.IllegalFormatException * If a format string contains an illegal syntax, a format * specifier that is incompatible with the given arguments, * insufficient arguments given the format string, or other * illegal conditions. For specification of all possible * formatting errors, see the <a * href="../util/Formatter.html#detail">Details</a> section of the * formatter class specification. * * @throws NullPointerException * If the <tt>format</tt> is <tt>null</tt> * * @return This writer * * @since 1.5 */ public PrintWriter format(Locale l, String format, Object ... args) { try { synchronized (lock) { ensureOpen(); if ((formatter == null) || (formatter.locale() != l)) formatter = new Formatter(this, l); formatter.format(l, format, args); if (autoFlush) out.flush(); } } catch (InterruptedIOException x) { Thread.currentThread().interrupt(); } catch (IOException x) { trouble = true; } return this; } /** * Appends the specified character sequence to this writer. * * <p> An invocation of this method of the form <tt>out.append(csq)</tt> * behaves in exactly the same way as the invocation * * <pre> * out.write(csq.toString()) </pre> * * <p> Depending on the specification of <tt>toString</tt> for the * character sequence <tt>csq</tt>, the entire sequence may not be * appended. 
For instance, invoking the <tt>toString</tt> method of a * character buffer will return a subsequence whose content depends upon * the buffer's position and limit. * * @param csq * The character sequence to append. If <tt>csq</tt> is * <tt>null</tt>, then the four characters <tt>"null"</tt> are * appended to this writer. * * @return This writer * * @since 1.5 */ public PrintWriter append(CharSequence csq) { if (csq == null) write("null"); else write(csq.toString()); return this; } /** * Appends a subsequence of the specified character sequence to this writer. * * <p> An invocation of this method of the form <tt>out.append(csq, start, * end)</tt> when <tt>csq</tt> is not <tt>null</tt>, behaves in * exactly the same way as the invocation * * <pre> * out.write(csq.subSequence(start, end).toString()) </pre> * * @param csq * The character sequence from which a subsequence will be * appended. If <tt>csq</tt> is <tt>null</tt>, then characters * will be appended as if <tt>csq</tt> contained the four * characters <tt>"null"</tt>. * * @param start * The index of the first character in the subsequence * * @param end * The index of the character following the last character in the * subsequence * * @return This writer * * @throws IndexOutOfBoundsException * If <tt>start</tt> or <tt>end</tt> are negative, <tt>start</tt> * is greater than <tt>end</tt>, or <tt>end</tt> is greater than * <tt>csq.length()</tt> * * @since 1.5 */ public PrintWriter append(CharSequence csq, int start, int end) { CharSequence cs = (csq == null ? "null" : csq); write(cs.subSequence(start, end).toString()); return this; } /** * Appends the specified character to this writer. * * <p> An invocation of this method of the form <tt>out.append(c)</tt> * behaves in exactly the same way as the invocation * * <pre> * out.write(c) </pre> * * @param c * The 16-bit character to append * * @return This writer * * @since 1.5 */ public PrintWriter append(char c) { write(c); return this; } }
google/j2objc
35,719
jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/math/BigDecimalTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.tests.java.math; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.math.BigDecimal; import java.math.BigInteger; import java.math.MathContext; import java.math.RoundingMode; public class BigDecimalTest extends junit.framework.TestCase { BigInteger value = new BigInteger("12345908"); BigInteger value2 = new BigInteger("12334560000"); /** * @tests java.math.BigDecimal#BigDecimal(java.math.BigInteger) */ public void test_ConstructorLjava_math_BigInteger() { BigDecimal big = new BigDecimal(value); assertTrue("the BigDecimal value is not initialized properly", big .unscaledValue().equals(value) && big.scale() == 0); } /** * @tests java.math.BigDecimal#BigDecimal(java.math.BigInteger, int) */ public void test_ConstructorLjava_math_BigIntegerI() { BigDecimal big = new BigDecimal(value2, 5); assertTrue("the BigDecimal value is not initialized properly", big .unscaledValue().equals(value2) && big.scale() == 5); assertTrue("the BigDecimal value is not represented properly", big .toString().equals("123345.60000")); } /** * @tests java.math.BigDecimal#BigDecimal(double) */ public 
void test_ConstructorD() { BigDecimal big = new BigDecimal(123E04); assertTrue( "the BigDecimal value taking a double argument is not initialized properly", big.toString().equals("1230000")); big = new BigDecimal(1.2345E-12); assertTrue("the double representation is not correct", big .doubleValue() == 1.2345E-12); big = new BigDecimal(-12345E-3); assertTrue("the double representation is not correct", big .doubleValue() == -12.345); big = new BigDecimal(5.1234567897654321e138); assertTrue("the double representation is not correct", big .doubleValue() == 5.1234567897654321E138 && big.scale() == 0); big = new BigDecimal(0.1); assertTrue( "the double representation of 0.1 bigDecimal is not correct", big.doubleValue() == 0.1); big = new BigDecimal(0.00345); assertTrue( "the double representation of 0.00345 bigDecimal is not correct", big.doubleValue() == 0.00345); // regression test for HARMONY-2429 big = new BigDecimal(-0.0); assertTrue( "the double representation of -0.0 bigDecimal is not correct", big.scale() == 0); } /** * @tests java.math.BigDecimal#BigDecimal(java.lang.String) */ public void test_ConstructorLjava_lang_String() throws NumberFormatException { BigDecimal big = new BigDecimal("345.23499600293850"); assertTrue("the BigDecimal value is not initialized properly", big .toString().equals("345.23499600293850") && big.scale() == 14); big = new BigDecimal("-12345"); assertTrue("the BigDecimal value is not initialized properly", big .toString().equals("-12345") && big.scale() == 0); big = new BigDecimal("123."); assertTrue("the BigDecimal value is not initialized properly", big .toString().equals("123") && big.scale() == 0); new BigDecimal("1.234E02"); } /** * @tests java.math.BigDecimal#BigDecimal(java.lang.String) */ public void test_constructor_String_plus_exp() { /* * BigDecimal does not support a + sign in the exponent when converting * from a String */ new BigDecimal(+23e-0); new BigDecimal(-23e+0); } /** * @tests 
java.math.BigDecimal#BigDecimal(java.lang.String) */ public void test_constructor_String_empty() { try { new BigDecimal(""); fail("NumberFormatException expected"); } catch (NumberFormatException e) { } } /** * @tests java.math.BigDecimal#BigDecimal(java.lang.String) */ public void test_constructor_String_plus_minus_exp() { try { new BigDecimal("+35e+-2"); fail("NumberFormatException expected"); } catch (NumberFormatException e) { } try { new BigDecimal("-35e-+2"); fail("NumberFormatException expected"); } catch (NumberFormatException e) { } } /** * @tests java.math.BigDecimal#BigDecimal(char[]) */ public void test_constructor_CC_plus_minus_exp() { try { new BigDecimal("+35e+-2".toCharArray()); fail("NumberFormatException expected"); } catch (NumberFormatException e) { } try { new BigDecimal("-35e-+2".toCharArray()); fail("NumberFormatException expected"); } catch (NumberFormatException e) { } } /** * @tests java.math.BigDecimal#abs() */ public void test_abs() { BigDecimal big = new BigDecimal("-1234"); BigDecimal bigabs = big.abs(); assertTrue("the absolute value of -1234 is not 1234", bigabs.toString() .equals("1234")); big = new BigDecimal(new BigInteger("2345"), 2); bigabs = big.abs(); assertTrue("the absolute value of 23.45 is not 23.45", bigabs .toString().equals("23.45")); } /** * @tests java.math.BigDecimal#add(java.math.BigDecimal) */ public void test_addLjava_math_BigDecimal() { BigDecimal add1 = new BigDecimal("23.456"); BigDecimal add2 = new BigDecimal("3849.235"); BigDecimal sum = add1.add(add2); assertTrue("the sum of 23.456 + 3849.235 is wrong", sum.unscaledValue() .toString().equals("3872691") && sum.scale() == 3); assertTrue("the sum of 23.456 + 3849.235 is not printed correctly", sum .toString().equals("3872.691")); BigDecimal add3 = new BigDecimal(12.34E02D); assertTrue("the sum of 23.456 + 12.34E02 is not printed correctly", (add1.add(add3)).toString().equals("1257.456")); } /** * @tests java.math.BigDecimal#compareTo(java.math.BigDecimal) */ 
public void test_compareToLjava_math_BigDecimal() { BigDecimal comp1 = new BigDecimal("1.00"); BigDecimal comp2 = new BigDecimal(1.000000D); assertTrue("1.00 and 1.000000 should be equal", comp1.compareTo(comp2) == 0); BigDecimal comp3 = new BigDecimal("1.02"); assertTrue("1.02 should be bigger than 1.00", comp3.compareTo(comp1) == 1); BigDecimal comp4 = new BigDecimal(0.98D); assertTrue("0.98 should be less than 1.00", comp4.compareTo(comp1) == -1); } /** * @tests java.math.BigDecimal#divide(java.math.BigDecimal, int) */ public void test_divideLjava_math_BigDecimalI() { BigDecimal divd1 = new BigDecimal(value, 2); BigDecimal divd2 = new BigDecimal("2.335"); BigDecimal divd3 = divd1.divide(divd2, BigDecimal.ROUND_UP); assertTrue("123459.08/2.335 is not correct", divd3.toString().equals( "52873.27") && divd3.scale() == divd1.scale()); assertTrue( "the unscaledValue representation of 123459.08/2.335 is not correct", divd3.unscaledValue().toString().equals("5287327")); divd2 = new BigDecimal(123.4D); divd3 = divd1.divide(divd2, BigDecimal.ROUND_DOWN); assertTrue("123459.08/123.4 is not correct", divd3.toString().equals( "1000.47") && divd3.scale() == 2); divd2 = new BigDecimal(000D); try { divd1.divide(divd2, BigDecimal.ROUND_DOWN); fail("divide by zero is not caught"); } catch (ArithmeticException e) { } } /** * @tests java.math.BigDecimal#divide(java.math.BigDecimal, int, int) */ public void test_divideLjava_math_BigDecimalII() { BigDecimal divd1 = new BigDecimal(value2, 4); BigDecimal divd2 = new BigDecimal("0.0023"); BigDecimal divd3 = divd1.divide(divd2, 3, BigDecimal.ROUND_HALF_UP); assertTrue("1233456/0.0023 is not correct", divd3.toString().equals( "536285217.391") && divd3.scale() == 3); divd2 = new BigDecimal(1345.5E-02D); divd3 = divd1.divide(divd2, 0, BigDecimal.ROUND_DOWN); assertTrue( "1233456/13.455 is not correct or does not have the correct scale", divd3.toString().equals("91672") && divd3.scale() == 0); divd2 = new BigDecimal(0000D); try { 
divd1.divide(divd2, 4, BigDecimal.ROUND_DOWN); fail("divide by zero is not caught"); } catch (ArithmeticException e) { } } /** * @tests java.math.BigDecimal#doubleValue() */ public void test_doubleValue() { BigDecimal bigDB = new BigDecimal(-1.234E-112); // Commenting out this part because it causes an endless loop (see HARMONY-319 and HARMONY-329) // assertTrue( // "the double representation of this BigDecimal is not correct", // bigDB.doubleValue() == -1.234E-112); bigDB = new BigDecimal(5.00E-324); assertTrue("the double representation of bigDecimal is not correct", bigDB.doubleValue() == 5.00E-324); bigDB = new BigDecimal(1.79E308); assertTrue("the double representation of bigDecimal is not correct", bigDB.doubleValue() == 1.79E308 && bigDB.scale() == 0); bigDB = new BigDecimal(-2.33E102); assertTrue( "the double representation of bigDecimal -2.33E102 is not correct", bigDB.doubleValue() == -2.33E102 && bigDB.scale() == 0); bigDB = new BigDecimal(Double.MAX_VALUE); bigDB = bigDB.add(bigDB); assertTrue( "a + number out of the double range should return infinity", bigDB.doubleValue() == Double.POSITIVE_INFINITY); bigDB = new BigDecimal(-Double.MAX_VALUE); bigDB = bigDB.add(bigDB); assertTrue( "a - number out of the double range should return neg infinity", bigDB.doubleValue() == Double.NEGATIVE_INFINITY); } /** * @tests java.math.BigDecimal#equals(java.lang.Object) */ public void test_equalsLjava_lang_Object() { BigDecimal equal1 = new BigDecimal(1.00D); BigDecimal equal2 = new BigDecimal("1.0"); assertFalse("1.00 and 1.0 should not be equal", equal1.equals(equal2)); equal2 = new BigDecimal(1.01D); assertFalse("1.00 and 1.01 should not be equal", equal1.equals(equal2)); equal2 = new BigDecimal("1.00"); assertFalse("1.00D and 1.00 should not be equal", equal1.equals(equal2)); BigInteger val = new BigInteger("100"); equal1 = new BigDecimal("1.00"); equal2 = new BigDecimal(val, 2); assertTrue("1.00(string) and 1.00(bigInteger) should be equal", equal1 
.equals(equal2)); equal1 = new BigDecimal(100D); equal2 = new BigDecimal("2.34576"); assertFalse("100D and 2.34576 should not be equal", equal1 .equals(equal2)); assertFalse("bigDecimal 100D does not equal string 23415", equal1 .equals("23415")); } /** * @tests java.math.BigDecimal#floatValue() */ public void test_floatValue() { BigDecimal fl1 = new BigDecimal("234563782344567"); assertTrue("the float representation of bigDecimal 234563782344567", fl1.floatValue() == 234563782344567f); BigDecimal fl2 = new BigDecimal(2.345E37); assertTrue("the float representation of bigDecimal 2.345E37", fl2 .floatValue() == 2.345E37F); fl2 = new BigDecimal(-1.00E-44); assertTrue("the float representation of bigDecimal -1.00E-44", fl2 .floatValue() == -1.00E-44F); fl2 = new BigDecimal(-3E12); assertTrue("the float representation of bigDecimal -3E12", fl2 .floatValue() == -3E12F); fl2 = new BigDecimal(Double.MAX_VALUE); assertTrue( "A number can't be represented by float should return infinity", fl2.floatValue() == Float.POSITIVE_INFINITY); fl2 = new BigDecimal(-Double.MAX_VALUE); assertTrue( "A number can't be represented by float should return infinity", fl2.floatValue() == Float.NEGATIVE_INFINITY); } /** * @tests java.math.BigDecimal#hashCode() */ public void test_hashCode() { // anything that is equal must have the same hashCode BigDecimal hash = new BigDecimal("1.00"); BigDecimal hash2 = new BigDecimal(1.00D); assertTrue("the hashCode of 1.00 and 1.00D is equal", hash.hashCode() != hash2.hashCode() && !hash.equals(hash2)); hash2 = new BigDecimal("1.0"); assertTrue("the hashCode of 1.0 and 1.00 is equal", hash.hashCode() != hash2.hashCode() && !hash.equals(hash2)); BigInteger val = new BigInteger("100"); hash2 = new BigDecimal(val, 2); assertTrue("hashCode of 1.00 and 1.00(bigInteger) is not equal", hash .hashCode() == hash2.hashCode() && hash.equals(hash2)); hash = new BigDecimal(value, 2); hash2 = new BigDecimal("-1233456.0000"); assertTrue("hashCode of 123459.08 and 
-1233456.0000 is not equal", hash .hashCode() != hash2.hashCode() && !hash.equals(hash2)); hash2 = new BigDecimal(value.negate(), 2); assertTrue("hashCode of 123459.08 and -123459.08 is not equal", hash .hashCode() != hash2.hashCode() && !hash.equals(hash2)); } /** * @tests java.math.BigDecimal#intValue() */ public void test_intValue() { BigDecimal int1 = new BigDecimal(value, 3); assertTrue("the int value of 12345.908 is not 12345", int1.intValue() == 12345); int1 = new BigDecimal("1.99"); assertTrue("the int value of 1.99 is not 1", int1.intValue() == 1); int1 = new BigDecimal("23423419083091823091283933"); // ran JDK and found representation for the above was -249268259 assertTrue("the int value of 23423419083091823091283933 is wrong", int1 .intValue() == -249268259); int1 = new BigDecimal(-1235D); assertTrue("the int value of -1235 is not -1235", int1.intValue() == -1235); } /** * @tests java.math.BigDecimal#longValue() */ public void test_longValue() { BigDecimal long1 = new BigDecimal(value2.negate(), 0); assertTrue("the long value of 12334560000 is not 12334560000", long1 .longValue() == -12334560000L); long1 = new BigDecimal(-1345.348E-123D); assertTrue("the long value of -1345.348E-123D is not zero", long1 .longValue() == 0); long1 = new BigDecimal("31323423423419083091823091283933"); // ran JDK and found representation for the above was // -5251313250005125155 assertTrue( "the long value of 31323423423419083091823091283933 is wrong", long1.longValue() == -5251313250005125155L); } /** * @tests java.math.BigDecimal#max(java.math.BigDecimal) */ public void test_maxLjava_math_BigDecimal() { BigDecimal max1 = new BigDecimal(value2, 1); BigDecimal max2 = new BigDecimal(value2, 4); assertTrue("1233456000.0 is not greater than 1233456", max1.max(max2) .equals(max1)); max1 = new BigDecimal(-1.224D); max2 = new BigDecimal(-1.2245D); assertTrue("-1.224 is not greater than -1.2245", max1.max(max2).equals( max1)); max1 = new BigDecimal(123E18); max2 = new 
BigDecimal(123E19); assertTrue("123E19 is the not the max", max1.max(max2).equals(max2)); } /** * @tests java.math.BigDecimal#min(java.math.BigDecimal) */ public void test_minLjava_math_BigDecimal() { BigDecimal min1 = new BigDecimal(-12345.4D); BigDecimal min2 = new BigDecimal(-12345.39D); assertTrue("-12345.39 should have been returned", min1.min(min2) .equals(min1)); min1 = new BigDecimal(value2, 5); min2 = new BigDecimal(value2, 0); assertTrue("123345.6 should have been returned", min1.min(min2).equals( min1)); } /** * @tests java.math.BigDecimal#movePointLeft(int) */ public void test_movePointLeftI() { BigDecimal movePtLeft = new BigDecimal("123456265.34"); BigDecimal alreadyMoved = movePtLeft.movePointLeft(5); assertTrue("move point left 5 failed", alreadyMoved.scale() == 7 && alreadyMoved.toString().equals("1234.5626534")); movePtLeft = new BigDecimal(value2.negate(), 0); alreadyMoved = movePtLeft.movePointLeft(12); assertTrue("move point left 12 failed", alreadyMoved.scale() == 12 && alreadyMoved.toString().equals("-0.012334560000")); movePtLeft = new BigDecimal(123E18); alreadyMoved = movePtLeft.movePointLeft(2); assertTrue("move point left 2 failed", alreadyMoved.scale() == movePtLeft.scale() + 2 && alreadyMoved.doubleValue() == 1.23E18); movePtLeft = new BigDecimal(1.123E-12); alreadyMoved = movePtLeft.movePointLeft(3); assertTrue("move point left 3 failed", alreadyMoved.scale() == movePtLeft.scale() + 3 && alreadyMoved.doubleValue() == 1.123E-15); movePtLeft = new BigDecimal(value, 2); alreadyMoved = movePtLeft.movePointLeft(-2); assertTrue("move point left -2 failed", alreadyMoved.scale() == movePtLeft.scale() - 2 && alreadyMoved.toString().equals("12345908")); } /** * @tests java.math.BigDecimal#movePointRight(int) */ public void test_movePointRightI() { BigDecimal movePtRight = new BigDecimal("-1.58796521458"); BigDecimal alreadyMoved = movePtRight.movePointRight(8); assertTrue("move point right 8 failed", alreadyMoved.scale() == 3 && 
alreadyMoved.toString().equals("-158796521.458")); movePtRight = new BigDecimal(value, 2); alreadyMoved = movePtRight.movePointRight(4); assertTrue("move point right 4 failed", alreadyMoved.scale() == 0 && alreadyMoved.toString().equals("1234590800")); movePtRight = new BigDecimal(134E12); alreadyMoved = movePtRight.movePointRight(2); assertTrue("move point right 2 failed", alreadyMoved.scale() == 0 && alreadyMoved.toString().equals("13400000000000000")); movePtRight = new BigDecimal(-3.4E-10); alreadyMoved = movePtRight.movePointRight(5); assertTrue("move point right 5 failed", alreadyMoved.scale() == movePtRight.scale() - 5 && alreadyMoved.doubleValue() == -0.000034); alreadyMoved = alreadyMoved.movePointRight(-5); assertTrue("move point right -5 failed", alreadyMoved .equals(movePtRight)); } /** * @tests java.math.BigDecimal#multiply(java.math.BigDecimal) */ public void test_multiplyLjava_math_BigDecimal() { BigDecimal multi1 = new BigDecimal(value, 5); BigDecimal multi2 = new BigDecimal(2.345D); BigDecimal result = multi1.multiply(multi2); assertTrue("123.45908 * 2.345 is not correct: " + result, result .toString().startsWith("289.51154260") && result.scale() == multi1.scale() + multi2.scale()); multi1 = new BigDecimal("34656"); multi2 = new BigDecimal("-2"); result = multi1.multiply(multi2); assertTrue("34656 * 2 is not correct", result.toString().equals( "-69312") && result.scale() == 0); multi1 = new BigDecimal(-2.345E-02); multi2 = new BigDecimal(-134E130); result = multi1.multiply(multi2); assertTrue("-2.345E-02 * -134E130 is not correct " + result.doubleValue(), result.doubleValue() == 3.1422999999999997E130 && result.scale() == multi1.scale() + multi2.scale()); multi1 = new BigDecimal("11235"); multi2 = new BigDecimal("0"); result = multi1.multiply(multi2); assertTrue("11235 * 0 is not correct", result.doubleValue() == 0 && result.scale() == 0); multi1 = new BigDecimal("-0.00234"); multi2 = new BigDecimal(13.4E10); result = multi1.multiply(multi2); 
assertTrue("-0.00234 * 13.4E10 is not correct", result.doubleValue() == -313560000 && result.scale() == multi1.scale() + multi2.scale()); } /** * @tests java.math.BigDecimal#negate() */ public void test_negate() { BigDecimal negate1 = new BigDecimal(value2, 7); assertTrue("the negate of 1233.4560000 is not -1233.4560000", negate1 .negate().toString().equals("-1233.4560000")); negate1 = new BigDecimal("-23465839"); assertTrue("the negate of -23465839 is not 23465839", negate1.negate() .toString().equals("23465839")); negate1 = new BigDecimal(-3.456E6); assertTrue("the negate of -3.456E6 is not 3.456E6", negate1.negate() .negate().equals(negate1)); } /** * @tests java.math.BigDecimal#scale() */ public void test_scale() { BigDecimal scale1 = new BigDecimal(value2, 8); assertTrue("the scale of the number 123.34560000 is wrong", scale1 .scale() == 8); BigDecimal scale2 = new BigDecimal("29389."); assertTrue("the scale of the number 29389. is wrong", scale2.scale() == 0); BigDecimal scale3 = new BigDecimal(3.374E13); assertTrue("the scale of the number 3.374E13 is wrong", scale3.scale() == 0); BigDecimal scale4 = new BigDecimal("-3.45E-203"); // note the scale is calculated as 15 digits of 345000.... + exponent - // 1. 
-1 for the 3 assertTrue("the scale of the number -3.45E-203 is wrong: " + scale4.scale(), scale4.scale() == 205); scale4 = new BigDecimal("-345.4E-200"); assertTrue("the scale of the number -345.4E-200 is wrong", scale4 .scale() == 201); } /** * @tests java.math.BigDecimal#setScale(int) */ public void test_setScaleI() { // rounding mode defaults to zero BigDecimal setScale1 = new BigDecimal(value, 3); BigDecimal setScale2 = setScale1.setScale(5); BigInteger setresult = new BigInteger("1234590800"); assertTrue("the number 12345.908 after setting scale is wrong", setScale2.unscaledValue().equals(setresult) && setScale2.scale() == 5); try { setScale2 = setScale1.setScale(2, BigDecimal.ROUND_UNNECESSARY); fail("arithmetic Exception not caught as a result of loosing precision"); } catch (ArithmeticException e) { } } /** * @tests java.math.BigDecimal#setScale(int, int) */ public void test_setScaleII() { BigDecimal setScale1 = new BigDecimal(2.323E102); BigDecimal setScale2 = setScale1.setScale(4); assertTrue("the number 2.323E102 after setting scale is wrong", setScale2.scale() == 4); assertTrue("the representation of the number 2.323E102 is wrong", setScale2.doubleValue() == 2.323E102); setScale1 = new BigDecimal("-1.253E-12"); setScale2 = setScale1.setScale(17, BigDecimal.ROUND_CEILING); assertTrue("the number -1.253E-12 after setting scale is wrong", setScale2.scale() == 17); assertTrue( "the representation of the number -1.253E-12 after setting scale is wrong, " + setScale2.toString(), setScale2.toString().equals("-1.25300E-12")); // testing rounding Mode ROUND_CEILING setScale1 = new BigDecimal(value, 4); setScale2 = setScale1.setScale(1, BigDecimal.ROUND_CEILING); assertTrue( "the number 1234.5908 after setting scale to 1/ROUND_CEILING is wrong", setScale2.toString().equals("1234.6") && setScale2.scale() == 1); BigDecimal setNeg = new BigDecimal(value.negate(), 4); setScale2 = setNeg.setScale(1, BigDecimal.ROUND_CEILING); assertTrue( "the number -1234.5908 after 
setting scale to 1/ROUND_CEILING is wrong", setScale2.toString().equals("-1234.5") && setScale2.scale() == 1); // testing rounding Mode ROUND_DOWN setScale2 = setNeg.setScale(1, BigDecimal.ROUND_DOWN); assertTrue( "the number -1234.5908 after setting scale to 1/ROUND_DOWN is wrong", setScale2.toString().equals("-1234.5") && setScale2.scale() == 1); setScale1 = new BigDecimal(value, 4); setScale2 = setScale1.setScale(1, BigDecimal.ROUND_DOWN); assertTrue( "the number 1234.5908 after setting scale to 1/ROUND_DOWN is wrong", setScale2.toString().equals("1234.5") && setScale2.scale() == 1); // testing rounding Mode ROUND_FLOOR setScale2 = setScale1.setScale(1, BigDecimal.ROUND_FLOOR); assertTrue( "the number 1234.5908 after setting scale to 1/ROUND_FLOOR is wrong", setScale2.toString().equals("1234.5") && setScale2.scale() == 1); setScale2 = setNeg.setScale(1, BigDecimal.ROUND_FLOOR); assertTrue( "the number -1234.5908 after setting scale to 1/ROUND_FLOOR is wrong", setScale2.toString().equals("-1234.6") && setScale2.scale() == 1); // testing rounding Mode ROUND_HALF_DOWN setScale2 = setScale1.setScale(3, BigDecimal.ROUND_HALF_DOWN); assertTrue( "the number 1234.5908 after setting scale to 3/ROUND_HALF_DOWN is wrong", setScale2.toString().equals("1234.591") && setScale2.scale() == 3); setScale1 = new BigDecimal(new BigInteger("12345000"), 5); setScale2 = setScale1.setScale(1, BigDecimal.ROUND_HALF_DOWN); assertTrue( "the number 123.45908 after setting scale to 1/ROUND_HALF_DOWN is wrong", setScale2.toString().equals("123.4") && setScale2.scale() == 1); setScale2 = new BigDecimal("-1234.5000").setScale(0, BigDecimal.ROUND_HALF_DOWN); assertTrue( "the number -1234.5908 after setting scale to 0/ROUND_HALF_DOWN is wrong", setScale2.toString().equals("-1234") && setScale2.scale() == 0); // testing rounding Mode ROUND_HALF_EVEN setScale1 = new BigDecimal(1.2345789D); setScale2 = setScale1.setScale(4, BigDecimal.ROUND_HALF_EVEN); assertTrue( "the number 1.2345789 after 
setting scale to 4/ROUND_HALF_EVEN is wrong", setScale2.doubleValue() == 1.2346D && setScale2.scale() == 4); setNeg = new BigDecimal(-1.2335789D); setScale2 = setNeg.setScale(2, BigDecimal.ROUND_HALF_EVEN); assertTrue( "the number -1.2335789 after setting scale to 2/ROUND_HALF_EVEN is wrong", setScale2.doubleValue() == -1.23D && setScale2.scale() == 2); setScale2 = new BigDecimal("1.2345000").setScale(3, BigDecimal.ROUND_HALF_EVEN); assertTrue( "the number 1.2345789 after setting scale to 3/ROUND_HALF_EVEN is wrong", setScale2.doubleValue() == 1.234D && setScale2.scale() == 3); setScale2 = new BigDecimal("-1.2345000").setScale(3, BigDecimal.ROUND_HALF_EVEN); assertTrue( "the number -1.2335789 after setting scale to 3/ROUND_HALF_EVEN is wrong", setScale2.doubleValue() == -1.234D && setScale2.scale() == 3); // testing rounding Mode ROUND_HALF_UP setScale1 = new BigDecimal("134567.34650"); setScale2 = setScale1.setScale(3, BigDecimal.ROUND_HALF_UP); assertTrue( "the number 134567.34658 after setting scale to 3/ROUND_HALF_UP is wrong", setScale2.toString().equals("134567.347") && setScale2.scale() == 3); setNeg = new BigDecimal("-1234.4567"); setScale2 = setNeg.setScale(0, BigDecimal.ROUND_HALF_UP); assertTrue( "the number -1234.4567 after setting scale to 0/ROUND_HALF_UP is wrong", setScale2.toString().equals("-1234") && setScale2.scale() == 0); // testing rounding Mode ROUND_UNNECESSARY try { setScale1.setScale(3, BigDecimal.ROUND_UNNECESSARY); fail("arithmetic Exception not caught for round unnecessary"); } catch (ArithmeticException e) { } // testing rounding Mode ROUND_UP setScale1 = new BigDecimal("100000.374"); setScale2 = setScale1.setScale(2, BigDecimal.ROUND_UP); assertTrue( "the number 100000.374 after setting scale to 2/ROUND_UP is wrong", setScale2.toString().equals("100000.38") && setScale2.scale() == 2); setNeg = new BigDecimal(-134.34589D); setScale2 = setNeg.setScale(2, BigDecimal.ROUND_UP); assertTrue( "the number -134.34589 after setting scale to 
2/ROUND_UP is wrong", setScale2.doubleValue() == -134.35D && setScale2.scale() == 2); // testing invalid rounding modes try { setScale2 = setScale1.setScale(0, -123); fail("IllegalArgumentException is not caught for wrong rounding mode"); } catch (IllegalArgumentException e) { } } /** * @tests java.math.BigDecimal#signum() */ public void test_signum() { BigDecimal sign = new BigDecimal(123E-104); assertTrue("123E-104 is not positive in signum()", sign.signum() == 1); sign = new BigDecimal("-1234.3959"); assertTrue("-1234.3959 is not negative in signum()", sign.signum() == -1); sign = new BigDecimal(000D); assertTrue("000D is not zero in signum()", sign.signum() == 0); } /** * @tests java.math.BigDecimal#subtract(java.math.BigDecimal) */ public void test_subtractLjava_math_BigDecimal() { BigDecimal sub1 = new BigDecimal("13948"); BigDecimal sub2 = new BigDecimal("2839.489"); BigDecimal result = sub1.subtract(sub2); assertTrue("13948 - 2839.489 is wrong: " + result, result.toString() .equals("11108.511") && result.scale() == 3); BigDecimal result2 = sub2.subtract(sub1); assertTrue("2839.489 - 13948 is wrong", result2.toString().equals( "-11108.511") && result2.scale() == 3); assertTrue("13948 - 2839.489 is not the negative of 2839.489 - 13948", result.equals(result2.negate())); sub1 = new BigDecimal(value, 1); sub2 = new BigDecimal("0"); result = sub1.subtract(sub2); assertTrue("1234590.8 - 0 is wrong", result.equals(sub1)); sub1 = new BigDecimal(1.234E-03); sub2 = new BigDecimal(3.423E-10); result = sub1.subtract(sub2); assertTrue("1.234E-03 - 3.423E-10 is wrong, " + result.doubleValue(), result.doubleValue() == 0.0012339996577); sub1 = new BigDecimal(1234.0123); sub2 = new BigDecimal(1234.0123000); result = sub1.subtract(sub2); assertTrue("1234.0123 - 1234.0123000 is wrong, " + result.doubleValue(), result.doubleValue() == 0.0); } /** * @tests java.math.BigDecimal#toBigInteger() */ public void test_toBigInteger() { BigDecimal sub1 = new BigDecimal("-29830.989"); 
BigInteger result = sub1.toBigInteger(); assertTrue("the bigInteger equivalent of -29830.989 is wrong", result .toString().equals("-29830")); sub1 = new BigDecimal(-2837E10); result = sub1.toBigInteger(); assertTrue("the bigInteger equivalent of -2837E10 is wrong", result .doubleValue() == -2837E10); sub1 = new BigDecimal(2.349E-10); result = sub1.toBigInteger(); assertTrue("the bigInteger equivalent of 2.349E-10 is wrong", result .equals(BigInteger.ZERO)); sub1 = new BigDecimal(value2, 6); result = sub1.toBigInteger(); assertTrue("the bigInteger equivalent of 12334.560000 is wrong", result .toString().equals("12334")); } /** * @tests java.math.BigDecimal#toString() */ public void test_toString() { BigDecimal toString1 = new BigDecimal("1234.000"); assertTrue("the toString representation of 1234.000 is wrong", toString1.toString().equals("1234.000")); toString1 = new BigDecimal("-123.4E-5"); assertTrue("the toString representation of -123.4E-5 is wrong: " + toString1, toString1.toString().equals("-0.001234")); toString1 = new BigDecimal("-1.455E-20"); assertTrue("the toString representation of -1.455E-20 is wrong", toString1.toString().equals("-1.455E-20")); toString1 = new BigDecimal(value2, 4); assertTrue("the toString representation of 1233456.0000 is wrong", toString1.toString().equals("1233456.0000")); } /** * @tests java.math.BigDecimal#unscaledValue() */ public void test_unscaledValue() { BigDecimal unsVal = new BigDecimal("-2839485.000"); assertTrue("the unscaledValue of -2839485.000 is wrong", unsVal .unscaledValue().toString().equals("-2839485000")); unsVal = new BigDecimal(123E10); assertTrue("the unscaledValue of 123E10 is wrong", unsVal .unscaledValue().toString().equals("1230000000000")); unsVal = new BigDecimal("-4.56E-13"); assertTrue("the unscaledValue of -4.56E-13 is wrong: " + unsVal.unscaledValue(), unsVal.unscaledValue().toString() .equals("-456")); unsVal = new BigDecimal(value, 3); assertTrue("the unscaledValue of 12345.908 is wrong", unsVal 
.unscaledValue().toString().equals("12345908")); } /** * @tests java.math.BigDecimal#valueOf(long) */ public void test_valueOfJ() { BigDecimal valueOfL = BigDecimal.valueOf(9223372036854775806L); assertTrue("the bigDecimal equivalent of 9223372036854775806 is wrong", valueOfL.unscaledValue().toString().equals( "9223372036854775806") && valueOfL.scale() == 0); assertTrue( "the toString representation of 9223372036854775806 is wrong", valueOfL.toString().equals("9223372036854775806")); valueOfL = BigDecimal.valueOf(0L); assertTrue("the bigDecimal equivalent of 0 is wrong", valueOfL .unscaledValue().toString().equals("0") && valueOfL.scale() == 0); } /** * @tests java.math.BigDecimal#valueOf(long, int) */ public void test_valueOfJI() { BigDecimal valueOfJI = BigDecimal.valueOf(9223372036854775806L, 5); assertTrue( "the bigDecimal equivalent of 92233720368547.75806 is wrong", valueOfJI.unscaledValue().toString().equals( "9223372036854775806") && valueOfJI.scale() == 5); assertTrue( "the toString representation of 9223372036854775806 is wrong", valueOfJI.toString().equals("92233720368547.75806")); valueOfJI = BigDecimal.valueOf(1234L, 8); assertTrue( "the bigDecimal equivalent of 92233720368547.75806 is wrong", valueOfJI.unscaledValue().toString().equals("1234") && valueOfJI.scale() == 8); assertTrue( "the toString representation of 9223372036854775806 is wrong", valueOfJI.toString().equals("0.00001234")); valueOfJI = BigDecimal.valueOf(0, 3); assertTrue( "the bigDecimal equivalent of 92233720368547.75806 is wrong", valueOfJI.unscaledValue().toString().equals("0") && valueOfJI.scale() == 3); assertTrue( "the toString representation of 9223372036854775806 is wrong", valueOfJI.toString().equals("0.000")); } public void test_BigDecimal_serialization() throws Exception { // Regression for HARMONY-1896 char[] in = { '1', '5', '6', '7', '8', '7', '.', '0', '0' }; BigDecimal bd = new BigDecimal(in, 0, 9); ByteArrayOutputStream bos = new ByteArrayOutputStream(); 
ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(bd); ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); ObjectInputStream ois = new ObjectInputStream(bis); BigDecimal nbd = (BigDecimal) ois.readObject(); assertEquals(bd.intValue(), nbd.intValue()); assertEquals(bd.doubleValue(), nbd.doubleValue(), 0.0); assertEquals(bd.toString(), nbd.toString()); } /** * @tests java.math.BigDecimal#stripTrailingZero(long) */ public void test_stripTrailingZero() { BigDecimal sixhundredtest = new BigDecimal("600.0"); assertTrue("stripTrailingZero failed for 600.0", ((sixhundredtest.stripTrailingZeros()).scale() == -2) ); /* Single digit, no trailing zero, odd number */ BigDecimal notrailingzerotest = new BigDecimal("1"); assertTrue("stripTrailingZero failed for 1", ((notrailingzerotest.stripTrailingZeros()).scale() == 0) ); BigDecimal zerotest = new BigDecimal("0.0000"); assertEquals("stripTrailingZero failed for 0.0000", 0, zerotest.stripTrailingZeros().scale()); } public void testMathContextConstruction() { String a = "-12380945E+61"; BigDecimal aNumber = new BigDecimal(a); int precision = 6; RoundingMode rm = RoundingMode.HALF_DOWN; MathContext mcIntRm = new MathContext(precision, rm); MathContext mcStr = new MathContext("precision=6 roundingMode=HALF_DOWN"); MathContext mcInt = new MathContext(precision); BigDecimal res = aNumber.abs(mcInt); assertEquals("MathContext Constructer with int precision failed", res, new BigDecimal("1.23809E+68")); assertEquals("Equal MathContexts are not Equal ", mcIntRm, mcStr); assertEquals("Different MathContext are reported as Equal ", mcInt.equals(mcStr), false); assertEquals("Equal MathContexts have different hashcodes ", mcIntRm.hashCode(), mcStr.hashCode()); assertEquals("MathContext.toString() returning incorrect value", mcIntRm.toString(), "precision=6 roundingMode=HALF_DOWN"); } }
googleapis/google-cloud-java
37,897
java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/DataSharingSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1beta/resources.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1beta; /** * * * <pre> * A resource message representing data sharing settings of a Google Analytics * account. * </pre> * * Protobuf type {@code google.analytics.admin.v1beta.DataSharingSettings} */ public final class DataSharingSettings extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.DataSharingSettings) DataSharingSettingsOrBuilder { private static final long serialVersionUID = 0L; // Use DataSharingSettings.newBuilder() to construct. 
private DataSharingSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DataSharingSettings() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DataSharingSettings(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.ResourcesProto .internal_static_google_analytics_admin_v1beta_DataSharingSettings_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.ResourcesProto .internal_static_google_analytics_admin_v1beta_DataSharingSettings_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.DataSharingSettings.class, com.google.analytics.admin.v1beta.DataSharingSettings.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SHARING_WITH_GOOGLE_SUPPORT_ENABLED_FIELD_NUMBER = 2; private boolean sharingWithGoogleSupportEnabled_ = false; /** * * * <pre> * Allows Google support to access the data in order to help troubleshoot * issues. * </pre> * * <code>bool sharing_with_google_support_enabled = 2;</code> * * @return The sharingWithGoogleSupportEnabled. */ @java.lang.Override public boolean getSharingWithGoogleSupportEnabled() { return sharingWithGoogleSupportEnabled_; } public static final int SHARING_WITH_GOOGLE_ASSIGNED_SALES_ENABLED_FIELD_NUMBER = 3; private boolean sharingWithGoogleAssignedSalesEnabled_ = false; /** * * * <pre> * Allows Google sales teams that are assigned to the customer to access the * data in order to suggest configuration changes to improve results. * Sales team restrictions still apply when enabled. * </pre> * * <code>bool sharing_with_google_assigned_sales_enabled = 3;</code> * * @return The sharingWithGoogleAssignedSalesEnabled. */ @java.lang.Override public boolean getSharingWithGoogleAssignedSalesEnabled() { return sharingWithGoogleAssignedSalesEnabled_; } public static final int SHARING_WITH_GOOGLE_ANY_SALES_ENABLED_FIELD_NUMBER = 4; private boolean sharingWithGoogleAnySalesEnabled_ = false; /** * * * <pre> * Allows any of Google sales to access the data in order to suggest * configuration changes to improve results. * </pre> * * <code>bool sharing_with_google_any_sales_enabled = 4;</code> * * @return The sharingWithGoogleAnySalesEnabled. 
*/ @java.lang.Override public boolean getSharingWithGoogleAnySalesEnabled() { return sharingWithGoogleAnySalesEnabled_; } public static final int SHARING_WITH_GOOGLE_PRODUCTS_ENABLED_FIELD_NUMBER = 5; private boolean sharingWithGoogleProductsEnabled_ = false; /** * * * <pre> * Allows Google to use the data to improve other Google products or services. * </pre> * * <code>bool sharing_with_google_products_enabled = 5;</code> * * @return The sharingWithGoogleProductsEnabled. */ @java.lang.Override public boolean getSharingWithGoogleProductsEnabled() { return sharingWithGoogleProductsEnabled_; } public static final int SHARING_WITH_OTHERS_ENABLED_FIELD_NUMBER = 6; private boolean sharingWithOthersEnabled_ = false; /** * * * <pre> * Allows Google to share the data anonymously in aggregate form with others. * </pre> * * <code>bool sharing_with_others_enabled = 6;</code> * * @return The sharingWithOthersEnabled. */ @java.lang.Override public boolean getSharingWithOthersEnabled() { return sharingWithOthersEnabled_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (sharingWithGoogleSupportEnabled_ != false) { output.writeBool(2, sharingWithGoogleSupportEnabled_); } if (sharingWithGoogleAssignedSalesEnabled_ != false) { output.writeBool(3, sharingWithGoogleAssignedSalesEnabled_); } if (sharingWithGoogleAnySalesEnabled_ != false) { output.writeBool(4, sharingWithGoogleAnySalesEnabled_); } if (sharingWithGoogleProductsEnabled_ != false) { output.writeBool(5, sharingWithGoogleProductsEnabled_); } if 
(sharingWithOthersEnabled_ != false) { output.writeBool(6, sharingWithOthersEnabled_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (sharingWithGoogleSupportEnabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 2, sharingWithGoogleSupportEnabled_); } if (sharingWithGoogleAssignedSalesEnabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 3, sharingWithGoogleAssignedSalesEnabled_); } if (sharingWithGoogleAnySalesEnabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 4, sharingWithGoogleAnySalesEnabled_); } if (sharingWithGoogleProductsEnabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 5, sharingWithGoogleProductsEnabled_); } if (sharingWithOthersEnabled_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(6, sharingWithOthersEnabled_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1beta.DataSharingSettings)) { return super.equals(obj); } com.google.analytics.admin.v1beta.DataSharingSettings other = (com.google.analytics.admin.v1beta.DataSharingSettings) obj; if (!getName().equals(other.getName())) return false; if (getSharingWithGoogleSupportEnabled() != other.getSharingWithGoogleSupportEnabled()) return false; if (getSharingWithGoogleAssignedSalesEnabled() != other.getSharingWithGoogleAssignedSalesEnabled()) return false; if (getSharingWithGoogleAnySalesEnabled() != other.getSharingWithGoogleAnySalesEnabled()) return false; if (getSharingWithGoogleProductsEnabled() != 
other.getSharingWithGoogleProductsEnabled()) return false; if (getSharingWithOthersEnabled() != other.getSharingWithOthersEnabled()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + SHARING_WITH_GOOGLE_SUPPORT_ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSharingWithGoogleSupportEnabled()); hash = (37 * hash) + SHARING_WITH_GOOGLE_ASSIGNED_SALES_ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSharingWithGoogleAssignedSalesEnabled()); hash = (37 * hash) + SHARING_WITH_GOOGLE_ANY_SALES_ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSharingWithGoogleAnySalesEnabled()); hash = (37 * hash) + SHARING_WITH_GOOGLE_PRODUCTS_ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSharingWithGoogleProductsEnabled()); hash = (37 * hash) + SHARING_WITH_OTHERS_ENABLED_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSharingWithOthersEnabled()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( 
com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1beta.DataSharingSettings parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1beta.DataSharingSettings prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A resource message representing data sharing settings of a Google Analytics * account. 
* </pre> * * Protobuf type {@code google.analytics.admin.v1beta.DataSharingSettings} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.DataSharingSettings) com.google.analytics.admin.v1beta.DataSharingSettingsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1beta.ResourcesProto .internal_static_google_analytics_admin_v1beta_DataSharingSettings_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1beta.ResourcesProto .internal_static_google_analytics_admin_v1beta_DataSharingSettings_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1beta.DataSharingSettings.class, com.google.analytics.admin.v1beta.DataSharingSettings.Builder.class); } // Construct using com.google.analytics.admin.v1beta.DataSharingSettings.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; sharingWithGoogleSupportEnabled_ = false; sharingWithGoogleAssignedSalesEnabled_ = false; sharingWithGoogleAnySalesEnabled_ = false; sharingWithGoogleProductsEnabled_ = false; sharingWithOthersEnabled_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1beta.ResourcesProto .internal_static_google_analytics_admin_v1beta_DataSharingSettings_descriptor; } @java.lang.Override public com.google.analytics.admin.v1beta.DataSharingSettings getDefaultInstanceForType() { return com.google.analytics.admin.v1beta.DataSharingSettings.getDefaultInstance(); } @java.lang.Override public 
com.google.analytics.admin.v1beta.DataSharingSettings build() { com.google.analytics.admin.v1beta.DataSharingSettings result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1beta.DataSharingSettings buildPartial() { com.google.analytics.admin.v1beta.DataSharingSettings result = new com.google.analytics.admin.v1beta.DataSharingSettings(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.analytics.admin.v1beta.DataSharingSettings result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.sharingWithGoogleSupportEnabled_ = sharingWithGoogleSupportEnabled_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.sharingWithGoogleAssignedSalesEnabled_ = sharingWithGoogleAssignedSalesEnabled_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.sharingWithGoogleAnySalesEnabled_ = sharingWithGoogleAnySalesEnabled_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.sharingWithGoogleProductsEnabled_ = sharingWithGoogleProductsEnabled_; } if (((from_bitField0_ & 0x00000020) != 0)) { result.sharingWithOthersEnabled_ = sharingWithOthersEnabled_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object 
value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1beta.DataSharingSettings) { return mergeFrom((com.google.analytics.admin.v1beta.DataSharingSettings) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.analytics.admin.v1beta.DataSharingSettings other) { if (other == com.google.analytics.admin.v1beta.DataSharingSettings.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.getSharingWithGoogleSupportEnabled() != false) { setSharingWithGoogleSupportEnabled(other.getSharingWithGoogleSupportEnabled()); } if (other.getSharingWithGoogleAssignedSalesEnabled() != false) { setSharingWithGoogleAssignedSalesEnabled(other.getSharingWithGoogleAssignedSalesEnabled()); } if (other.getSharingWithGoogleAnySalesEnabled() != false) { setSharingWithGoogleAnySalesEnabled(other.getSharingWithGoogleAnySalesEnabled()); } if (other.getSharingWithGoogleProductsEnabled() != false) { setSharingWithGoogleProductsEnabled(other.getSharingWithGoogleProductsEnabled()); } if (other.getSharingWithOthersEnabled() != false) { setSharingWithOthersEnabled(other.getSharingWithOthersEnabled()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = 
input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { sharingWithGoogleSupportEnabled_ = input.readBool(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { sharingWithGoogleAssignedSalesEnabled_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { sharingWithGoogleAnySalesEnabled_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 case 40: { sharingWithGoogleProductsEnabled_ = input.readBool(); bitField0_ |= 0x00000010; break; } // case 40 case 48: { sharingWithOthersEnabled_ = input.readBool(); bitField0_ |= 0x00000020; break; } // case 48 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for name. 
*/ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Output only. Resource name. * Format: accounts/{account}/dataSharingSettings * Example: "accounts/1000/dataSharingSettings" * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private boolean sharingWithGoogleSupportEnabled_; /** * * * <pre> * Allows Google support to access the data in order to help troubleshoot * issues. * </pre> * * <code>bool sharing_with_google_support_enabled = 2;</code> * * @return The sharingWithGoogleSupportEnabled. 
*/ @java.lang.Override public boolean getSharingWithGoogleSupportEnabled() { return sharingWithGoogleSupportEnabled_; } /** * * * <pre> * Allows Google support to access the data in order to help troubleshoot * issues. * </pre> * * <code>bool sharing_with_google_support_enabled = 2;</code> * * @param value The sharingWithGoogleSupportEnabled to set. * @return This builder for chaining. */ public Builder setSharingWithGoogleSupportEnabled(boolean value) { sharingWithGoogleSupportEnabled_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Allows Google support to access the data in order to help troubleshoot * issues. * </pre> * * <code>bool sharing_with_google_support_enabled = 2;</code> * * @return This builder for chaining. */ public Builder clearSharingWithGoogleSupportEnabled() { bitField0_ = (bitField0_ & ~0x00000002); sharingWithGoogleSupportEnabled_ = false; onChanged(); return this; } private boolean sharingWithGoogleAssignedSalesEnabled_; /** * * * <pre> * Allows Google sales teams that are assigned to the customer to access the * data in order to suggest configuration changes to improve results. * Sales team restrictions still apply when enabled. * </pre> * * <code>bool sharing_with_google_assigned_sales_enabled = 3;</code> * * @return The sharingWithGoogleAssignedSalesEnabled. */ @java.lang.Override public boolean getSharingWithGoogleAssignedSalesEnabled() { return sharingWithGoogleAssignedSalesEnabled_; } /** * * * <pre> * Allows Google sales teams that are assigned to the customer to access the * data in order to suggest configuration changes to improve results. * Sales team restrictions still apply when enabled. * </pre> * * <code>bool sharing_with_google_assigned_sales_enabled = 3;</code> * * @param value The sharingWithGoogleAssignedSalesEnabled to set. * @return This builder for chaining. 
*/ public Builder setSharingWithGoogleAssignedSalesEnabled(boolean value) { sharingWithGoogleAssignedSalesEnabled_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Allows Google sales teams that are assigned to the customer to access the * data in order to suggest configuration changes to improve results. * Sales team restrictions still apply when enabled. * </pre> * * <code>bool sharing_with_google_assigned_sales_enabled = 3;</code> * * @return This builder for chaining. */ public Builder clearSharingWithGoogleAssignedSalesEnabled() { bitField0_ = (bitField0_ & ~0x00000004); sharingWithGoogleAssignedSalesEnabled_ = false; onChanged(); return this; } private boolean sharingWithGoogleAnySalesEnabled_; /** * * * <pre> * Allows any of Google sales to access the data in order to suggest * configuration changes to improve results. * </pre> * * <code>bool sharing_with_google_any_sales_enabled = 4;</code> * * @return The sharingWithGoogleAnySalesEnabled. */ @java.lang.Override public boolean getSharingWithGoogleAnySalesEnabled() { return sharingWithGoogleAnySalesEnabled_; } /** * * * <pre> * Allows any of Google sales to access the data in order to suggest * configuration changes to improve results. * </pre> * * <code>bool sharing_with_google_any_sales_enabled = 4;</code> * * @param value The sharingWithGoogleAnySalesEnabled to set. * @return This builder for chaining. */ public Builder setSharingWithGoogleAnySalesEnabled(boolean value) { sharingWithGoogleAnySalesEnabled_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Allows any of Google sales to access the data in order to suggest * configuration changes to improve results. * </pre> * * <code>bool sharing_with_google_any_sales_enabled = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearSharingWithGoogleAnySalesEnabled() { bitField0_ = (bitField0_ & ~0x00000008); sharingWithGoogleAnySalesEnabled_ = false; onChanged(); return this; } private boolean sharingWithGoogleProductsEnabled_; /** * * * <pre> * Allows Google to use the data to improve other Google products or services. * </pre> * * <code>bool sharing_with_google_products_enabled = 5;</code> * * @return The sharingWithGoogleProductsEnabled. */ @java.lang.Override public boolean getSharingWithGoogleProductsEnabled() { return sharingWithGoogleProductsEnabled_; } /** * * * <pre> * Allows Google to use the data to improve other Google products or services. * </pre> * * <code>bool sharing_with_google_products_enabled = 5;</code> * * @param value The sharingWithGoogleProductsEnabled to set. * @return This builder for chaining. */ public Builder setSharingWithGoogleProductsEnabled(boolean value) { sharingWithGoogleProductsEnabled_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * Allows Google to use the data to improve other Google products or services. * </pre> * * <code>bool sharing_with_google_products_enabled = 5;</code> * * @return This builder for chaining. */ public Builder clearSharingWithGoogleProductsEnabled() { bitField0_ = (bitField0_ & ~0x00000010); sharingWithGoogleProductsEnabled_ = false; onChanged(); return this; } private boolean sharingWithOthersEnabled_; /** * * * <pre> * Allows Google to share the data anonymously in aggregate form with others. * </pre> * * <code>bool sharing_with_others_enabled = 6;</code> * * @return The sharingWithOthersEnabled. */ @java.lang.Override public boolean getSharingWithOthersEnabled() { return sharingWithOthersEnabled_; } /** * * * <pre> * Allows Google to share the data anonymously in aggregate form with others. * </pre> * * <code>bool sharing_with_others_enabled = 6;</code> * * @param value The sharingWithOthersEnabled to set. * @return This builder for chaining. 
*/ public Builder setSharingWithOthersEnabled(boolean value) { sharingWithOthersEnabled_ = value; bitField0_ |= 0x00000020; onChanged(); return this; } /** * * * <pre> * Allows Google to share the data anonymously in aggregate form with others. * </pre> * * <code>bool sharing_with_others_enabled = 6;</code> * * @return This builder for chaining. */ public Builder clearSharingWithOthersEnabled() { bitField0_ = (bitField0_ & ~0x00000020); sharingWithOthersEnabled_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.DataSharingSettings) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.DataSharingSettings) private static final com.google.analytics.admin.v1beta.DataSharingSettings DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.DataSharingSettings(); } public static com.google.analytics.admin.v1beta.DataSharingSettings getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DataSharingSettings> PARSER = new com.google.protobuf.AbstractParser<DataSharingSettings>() { @java.lang.Override public DataSharingSettings parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DataSharingSettings> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DataSharingSettings> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1beta.DataSharingSettings getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/j2objc
38,135
jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/util/CharsTrie.java
/* GENERATED SOURCE. DO NOT MODIFY. */ // © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html#License /* ******************************************************************************* * Copyright (C) 2011-2014, International Business Machines * Corporation and others. All Rights Reserved. ******************************************************************************* * created on: 2011jan06 * created by: Markus W. Scherer * ported from ICU4C ucharstrie.h/.cpp */ package android.icu.util; import java.io.IOException; import java.util.ArrayList; import java.util.NoSuchElementException; import android.icu.text.UTF16; import android.icu.util.BytesTrie.Result; /** * Light-weight, non-const reader class for a CharsTrie. * Traverses a char-serialized data structure with minimal state, * for mapping strings (16-bit-unit sequences) to non-negative integer values. * * <p>This class is not intended for public subclassing. * * @author Markus W. Scherer * @hide Only a subset of ICU is exposed in Android */ public final class CharsTrie implements Cloneable, Iterable<CharsTrie.Entry> { /** * Constructs a CharsTrie reader instance. * * <p>The CharSequence must contain a copy of a char sequence from the CharsTrieBuilder, * with the offset indicating the first char of that sequence. * The CharsTrie object will not read more chars than * the CharsTrieBuilder generated in the corresponding build() call. * * <p>The CharSequence is not copied/cloned and must not be modified while * the CharsTrie object is in use. * * @param trieChars CharSequence that contains the serialized trie. * @param offset Root offset of the trie in the CharSequence. */ public CharsTrie(CharSequence trieChars, int offset) { chars_=trieChars; pos_=root_=offset; remainingMatchLength_=-1; } /** * Clones this trie reader object and its state, * but not the char array which will be shared. * @return A shallow clone of this trie. 
*/ @Override public Object clone() throws CloneNotSupportedException { return super.clone(); // A shallow copy is just what we need. } /** * Resets this trie to its initial state. * @return this */ public CharsTrie reset() { pos_=root_; remainingMatchLength_=-1; return this; } /** * CharsTrie state object, for saving a trie's current state * and resetting the trie back to this state later. */ public static final class State { /** * Constructs an empty State. */ public State() {} private CharSequence chars; private int root; private int pos; private int remainingMatchLength; } /** * Saves the state of this trie. * @param state The State object to hold the trie's state. * @return this * @see #resetToState */ public CharsTrie saveState(State state) /*const*/ { state.chars=chars_; state.root=root_; state.pos=pos_; state.remainingMatchLength=remainingMatchLength_; return this; } /** * Resets this trie to the saved state. * @param state The State object which holds a saved trie state. * @return this * @throws IllegalArgumentException if the state object contains no state, * or the state of a different trie * @see #saveState * @see #reset */ public CharsTrie resetToState(State state) { if(chars_==state.chars && chars_!=null && root_==state.root) { pos_=state.pos; remainingMatchLength_=state.remainingMatchLength; } else { throw new IllegalArgumentException("incompatible trie state"); } return this; } /** * Determines whether the string so far matches, whether it has a value, * and whether another input char can continue a matching string. * @return The match/value Result. */ public Result current() /*const*/ { int pos=pos_; if(pos<0) { return Result.NO_MATCH; } else { int node; return (remainingMatchLength_<0 && (node=chars_.charAt(pos))>=kMinValueLead) ? valueResults_[node>>15] : Result.NO_VALUE; } } /** * Traverses the trie from the initial state for this input char. * Equivalent to reset().next(inUnit). * @param inUnit Input char value. 
Values below 0 and above 0xffff will never match. * @return The match/value Result. */ public Result first(int inUnit) { remainingMatchLength_=-1; return nextImpl(root_, inUnit); } /** * Traverses the trie from the initial state for the * one or two UTF-16 code units for this input code point. * Equivalent to reset().nextForCodePoint(cp). * @param cp A Unicode code point 0..0x10ffff. * @return The match/value Result. */ public Result firstForCodePoint(int cp) { return cp<=0xffff ? first(cp) : (first(UTF16.getLeadSurrogate(cp)).hasNext() ? next(UTF16.getTrailSurrogate(cp)) : Result.NO_MATCH); } /** * Traverses the trie from the current state for this input char. * @param inUnit Input char value. Values below 0 and above 0xffff will never match. * @return The match/value Result. */ public Result next(int inUnit) { int pos=pos_; if(pos<0) { return Result.NO_MATCH; } int length=remainingMatchLength_; // Actual remaining match length minus 1. if(length>=0) { // Remaining part of a linear-match node. if(inUnit==chars_.charAt(pos++)) { remainingMatchLength_=--length; pos_=pos; int node; return (length<0 && (node=chars_.charAt(pos))>=kMinValueLead) ? valueResults_[node>>15] : Result.NO_VALUE; } else { stop(); return Result.NO_MATCH; } } return nextImpl(pos, inUnit); } /** * Traverses the trie from the current state for the * one or two UTF-16 code units for this input code point. * @param cp A Unicode code point 0..0x10ffff. * @return The match/value Result. */ public Result nextForCodePoint(int cp) { return cp<=0xffff ? next(cp) : (next(UTF16.getLeadSurrogate(cp)).hasNext() ? next(UTF16.getTrailSurrogate(cp)) : Result.NO_MATCH); } /** * Traverses the trie from the current state for this string. * Equivalent to * <pre> * Result result=current(); * for(each c in s) * if(!result.hasNext()) return Result.NO_MATCH; * result=next(c); * return result; * </pre> * @param s Contains a string. * @param sIndex The start index of the string in s. 
* @param sLimit The (exclusive) end index of the string in s. * @return The match/value Result. */ public Result next(CharSequence s, int sIndex, int sLimit) { if(sIndex>=sLimit) { // Empty input. return current(); } int pos=pos_; if(pos<0) { return Result.NO_MATCH; } int length=remainingMatchLength_; // Actual remaining match length minus 1. for(;;) { // Fetch the next input unit, if there is one. // Continue a linear-match node. char inUnit; for(;;) { if(sIndex==sLimit) { remainingMatchLength_=length; pos_=pos; int node; return (length<0 && (node=chars_.charAt(pos))>=kMinValueLead) ? valueResults_[node>>15] : Result.NO_VALUE; } inUnit=s.charAt(sIndex++); if(length<0) { remainingMatchLength_=length; break; } if(inUnit!=chars_.charAt(pos)) { stop(); return Result.NO_MATCH; } ++pos; --length; } int node=chars_.charAt(pos++); for(;;) { if(node<kMinLinearMatch) { Result result=branchNext(pos, node, inUnit); if(result==Result.NO_MATCH) { return Result.NO_MATCH; } // Fetch the next input unit, if there is one. if(sIndex==sLimit) { return result; } if(result==Result.FINAL_VALUE) { // No further matching units. stop(); return Result.NO_MATCH; } inUnit=s.charAt(sIndex++); pos=pos_; // branchNext() advanced pos and wrote it to pos_ . node=chars_.charAt(pos++); } else if(node<kMinValueLead) { // Match length+1 units. length=node-kMinLinearMatch; // Actual match length minus 1. if(inUnit!=chars_.charAt(pos)) { stop(); return Result.NO_MATCH; } ++pos; --length; break; } else if((node&kValueIsFinal)!=0) { // No further matching units. stop(); return Result.NO_MATCH; } else { // Skip intermediate value. pos=skipNodeValue(pos, node); node&=kNodeTypeMask; } } } } /** * Returns a matching string's value if called immediately after * current()/first()/next() returned Result.INTERMEDIATE_VALUE or Result.FINAL_VALUE. * getValue() can be called multiple times. * * Do not call getValue() after Result.NO_MATCH or Result.NO_VALUE! * @return The value for the string so far. 
*/ public int getValue() /*const*/ { int pos=pos_; int leadUnit=chars_.charAt(pos++); assert(leadUnit>=kMinValueLead); return (leadUnit&kValueIsFinal)!=0 ? readValue(chars_, pos, leadUnit&0x7fff) : readNodeValue(chars_, pos, leadUnit); } /** * Determines whether all strings reachable from the current state * map to the same value, and if so, returns that value. * @return The unique value in bits 32..1 with bit 0 set, * if all strings reachable from the current state * map to the same value; otherwise returns 0. */ public long getUniqueValue() /*const*/ { int pos=pos_; if(pos<0) { return 0; } // Skip the rest of a pending linear-match node. long uniqueValue=findUniqueValue(chars_, pos+remainingMatchLength_+1, 0); // Ignore internally used bits 63..33; extend the actual value's sign bit from bit 32. return (uniqueValue<<31)>>31; } /** * Finds each char which continues the string from the current state. * That is, each char c for which it would be next(c)!=Result.NO_MATCH now. * @param out Each next char is appended to this object. * (Only uses the out.append(c) method.) * @return The number of chars which continue the string from here. */ public int getNextChars(Appendable out) /*const*/ { int pos=pos_; if(pos<0) { return 0; } if(remainingMatchLength_>=0) { append(out, chars_.charAt(pos)); // Next unit of a pending linear-match node. return 1; } int node=chars_.charAt(pos++); if(node>=kMinValueLead) { if((node&kValueIsFinal)!=0) { return 0; } else { pos=skipNodeValue(pos, node); node&=kNodeTypeMask; } } if(node<kMinLinearMatch) { if(node==0) { node=chars_.charAt(pos++); } getNextBranchChars(chars_, pos, ++node, out); return node; } else { // First unit of the linear-match node. append(out, chars_.charAt(pos)); return 1; } } /** * Iterates from the current state of this trie. * @return A new CharsTrie.Iterator. */ @Override public Iterator iterator() { return new Iterator(chars_, pos_, remainingMatchLength_, 0); } /** * Iterates from the current state of this trie. 
* @param maxStringLength If 0, the iterator returns full strings. * Otherwise, the iterator returns strings with this maximum length. * @return A new CharsTrie.Iterator. */ public Iterator iterator(int maxStringLength) { return new Iterator(chars_, pos_, remainingMatchLength_, maxStringLength); } /** * Iterates from the root of a char-serialized BytesTrie. * @param trieChars CharSequence that contains the serialized trie. * @param offset Root offset of the trie in the CharSequence. * @param maxStringLength If 0, the iterator returns full strings. * Otherwise, the iterator returns strings with this maximum length. * @return A new CharsTrie.Iterator. */ public static Iterator iterator(CharSequence trieChars, int offset, int maxStringLength) { return new Iterator(trieChars, offset, -1, maxStringLength); } /** * Return value type for the Iterator. */ public static final class Entry { /** * The string. */ public CharSequence chars; /** * The value associated with the string. */ public int value; private Entry() { } } /** * Iterator for all of the (string, value) pairs in a CharsTrie. */ public static final class Iterator implements java.util.Iterator<Entry> { private Iterator(CharSequence trieChars, int offset, int remainingMatchLength, int maxStringLength) { chars_=trieChars; pos_=initialPos_=offset; remainingMatchLength_=initialRemainingMatchLength_=remainingMatchLength; maxLength_=maxStringLength; int length=remainingMatchLength_; // Actual remaining match length minus 1. if(length>=0) { // Pending linear-match node, append remaining bytes to str_. ++length; if(maxLength_>0 && length>maxLength_) { length=maxLength_; // This will leave remainingMatchLength>=0 as a signal. } str_.append(chars_, pos_, pos_+length); pos_+=length; remainingMatchLength_-=length; } } /** * Resets this iterator to its initial state. 
* @return this */ public Iterator reset() { pos_=initialPos_; remainingMatchLength_=initialRemainingMatchLength_; skipValue_=false; int length=remainingMatchLength_+1; // Remaining match length. if(maxLength_>0 && length>maxLength_) { length=maxLength_; } str_.setLength(length); pos_+=length; remainingMatchLength_-=length; stack_.clear(); return this; } /** * @return true if there are more elements. */ @Override public boolean hasNext() /*const*/ { return pos_>=0 || !stack_.isEmpty(); } /** * Finds the next (string, value) pair if there is one. * * If the string is truncated to the maximum length and does not * have a real value, then the value is set to -1. * In this case, this "not a real value" is indistinguishable from * a real value of -1. * @return An Entry with the string and value of the next element. * @throws NoSuchElementException - iteration has no more elements. */ @Override public Entry next() { int pos=pos_; if(pos<0) { if(stack_.isEmpty()) { throw new NoSuchElementException(); } // Pop the state off the stack and continue with the next outbound edge of // the branch node. long top=stack_.remove(stack_.size()-1); int length=(int)top; pos=(int)(top>>32); str_.setLength(length&0xffff); length>>>=16; if(length>1) { pos=branchNext(pos, length); if(pos<0) { return entry_; // Reached a final value. } } else { str_.append(chars_.charAt(pos++)); } } if(remainingMatchLength_>=0) { // We only get here if we started in a pending linear-match node // with more than maxLength remaining units. return truncateAndStop(); } for(;;) { int node=chars_.charAt(pos++); if(node>=kMinValueLead) { if(skipValue_) { pos=skipNodeValue(pos, node); node&=kNodeTypeMask; skipValue_=false; } else { // Deliver value for the string so far. 
boolean isFinal=(node&kValueIsFinal)!=0; if(isFinal) { entry_.value=readValue(chars_, pos, node&0x7fff); } else { entry_.value=readNodeValue(chars_, pos, node); } if(isFinal || (maxLength_>0 && str_.length()==maxLength_)) { pos_=-1; } else { // We cannot skip the value right here because it shares its // lead unit with a match node which we have to evaluate // next time. // Instead, keep pos_ on the node lead unit itself. pos_=pos-1; skipValue_=true; } entry_.chars=str_; return entry_; } } if(maxLength_>0 && str_.length()==maxLength_) { return truncateAndStop(); } if(node<kMinLinearMatch) { if(node==0) { node=chars_.charAt(pos++); } pos=branchNext(pos, node+1); if(pos<0) { return entry_; // Reached a final value. } } else { // Linear-match node, append length units to str_. int length=node-kMinLinearMatch+1; if(maxLength_>0 && str_.length()+length>maxLength_) { str_.append(chars_, pos, pos+maxLength_-str_.length()); return truncateAndStop(); } str_.append(chars_, pos, pos+length); pos+=length; } } } /** * Iterator.remove() is not supported. * @throws UnsupportedOperationException (always) */ @Override public void remove() { throw new UnsupportedOperationException(); } private Entry truncateAndStop() { pos_=-1; // We reset entry_.chars every time we return entry_ // just because the caller might have modified the Entry. entry_.chars=str_; entry_.value=-1; // no real value for str return entry_; } private int branchNext(int pos, int length) { while(length>kMaxBranchLinearSubNodeLength) { ++pos; // ignore the comparison unit // Push state for the greater-or-equal edge. stack_.add(((long)skipDelta(chars_, pos)<<32)|((length-(length>>1))<<16)|str_.length()); // Follow the less-than edge. length>>=1; pos=jumpByDelta(chars_, pos); } // List of key-value pairs where values are either final values or jump deltas. // Read the first (key, value) pair. 
char trieUnit=chars_.charAt(pos++); int node=chars_.charAt(pos++); boolean isFinal=(node&kValueIsFinal)!=0; int value=readValue(chars_, pos, node&=0x7fff); pos=skipValue(pos, node); stack_.add(((long)pos<<32)|((length-1)<<16)|str_.length()); str_.append(trieUnit); if(isFinal) { pos_=-1; entry_.chars=str_; entry_.value=value; return -1; } else { return pos+value; } } private CharSequence chars_; private int pos_; private int initialPos_; private int remainingMatchLength_; private int initialRemainingMatchLength_; private boolean skipValue_; // Skip intermediate value which was already delivered. private StringBuilder str_=new StringBuilder(); private int maxLength_; private Entry entry_=new Entry(); // The stack stores longs for backtracking to another // outbound edge of a branch node. // Each long has the offset in chars_ in bits 62..32, // the str_.length() from before the node in bits 15..0, // and the remaining branch length in bits 31..16. // (We could store the remaining branch length minus 1 in bits 30..16 and not use bit 31, // but the code looks more confusing that way.) private ArrayList<Long> stack_=new ArrayList<Long>(); } private void stop() { pos_=-1; } // Reads a compact 32-bit integer. // pos is already after the leadUnit, and the lead unit has bit 15 reset. 
private static int readValue(CharSequence chars, int pos, int leadUnit) { int value; if(leadUnit<kMinTwoUnitValueLead) { value=leadUnit; } else if(leadUnit<kThreeUnitValueLead) { value=((leadUnit-kMinTwoUnitValueLead)<<16)|chars.charAt(pos); } else { value=(chars.charAt(pos)<<16)|chars.charAt(pos+1); } return value; } private static int skipValue(int pos, int leadUnit) { if(leadUnit>=kMinTwoUnitValueLead) { if(leadUnit<kThreeUnitValueLead) { ++pos; } else { pos+=2; } } return pos; } private static int skipValue(CharSequence chars, int pos) { int leadUnit=chars.charAt(pos++); return skipValue(pos, leadUnit&0x7fff); } private static int readNodeValue(CharSequence chars, int pos, int leadUnit) { assert(kMinValueLead<=leadUnit && leadUnit<kValueIsFinal); int value; if(leadUnit<kMinTwoUnitNodeValueLead) { value=(leadUnit>>6)-1; } else if(leadUnit<kThreeUnitNodeValueLead) { value=(((leadUnit&0x7fc0)-kMinTwoUnitNodeValueLead)<<10)|chars.charAt(pos); } else { value=(chars.charAt(pos)<<16)|chars.charAt(pos+1); } return value; } private static int skipNodeValue(int pos, int leadUnit) { assert(kMinValueLead<=leadUnit && leadUnit<kValueIsFinal); if(leadUnit>=kMinTwoUnitNodeValueLead) { if(leadUnit<kThreeUnitNodeValueLead) { ++pos; } else { pos+=2; } } return pos; } private static int jumpByDelta(CharSequence chars, int pos) { int delta=chars.charAt(pos++); if(delta>=kMinTwoUnitDeltaLead) { if(delta==kThreeUnitDeltaLead) { delta=(chars.charAt(pos)<<16)|chars.charAt(pos+1); pos+=2; } else { delta=((delta-kMinTwoUnitDeltaLead)<<16)|chars.charAt(pos++); } } return pos+delta; } private static int skipDelta(CharSequence chars, int pos) { int delta=chars.charAt(pos++); if(delta>=kMinTwoUnitDeltaLead) { if(delta==kThreeUnitDeltaLead) { pos+=2; } else { ++pos; } } return pos; } private static Result[] valueResults_={ Result.INTERMEDIATE_VALUE, Result.FINAL_VALUE }; // Handles a branch node for both next(unit) and next(string). 
private Result branchNext(int pos, int length, int inUnit) { // Branch according to the current unit. if(length==0) { length=chars_.charAt(pos++); } ++length; // The length of the branch is the number of units to select from. // The data structure encodes a binary search. while(length>kMaxBranchLinearSubNodeLength) { if(inUnit<chars_.charAt(pos++)) { length>>=1; pos=jumpByDelta(chars_, pos); } else { length=length-(length>>1); pos=skipDelta(chars_, pos); } } // Drop down to linear search for the last few units. // length>=2 because the loop body above sees length>kMaxBranchLinearSubNodeLength>=3 // and divides length by 2. do { if(inUnit==chars_.charAt(pos++)) { Result result; int node=chars_.charAt(pos); if((node&kValueIsFinal)!=0) { // Leave the final value for getValue() to read. result=Result.FINAL_VALUE; } else { // Use the non-final value as the jump delta. ++pos; // int delta=readValue(pos, node); int delta; if(node<kMinTwoUnitValueLead) { delta=node; } else if(node<kThreeUnitValueLead) { delta=((node-kMinTwoUnitValueLead)<<16)|chars_.charAt(pos++); } else { delta=(chars_.charAt(pos)<<16)|chars_.charAt(pos+1); pos+=2; } // end readValue() pos+=delta; node=chars_.charAt(pos); result= node>=kMinValueLead ? valueResults_[node>>15] : Result.NO_VALUE; } pos_=pos; return result; } --length; pos=skipValue(chars_, pos); } while(length>1); if(inUnit==chars_.charAt(pos++)) { pos_=pos; int node=chars_.charAt(pos); return node>=kMinValueLead ? valueResults_[node>>15] : Result.NO_VALUE; } else { stop(); return Result.NO_MATCH; } } // Requires remainingLength_<0. private Result nextImpl(int pos, int inUnit) { int node=chars_.charAt(pos++); for(;;) { if(node<kMinLinearMatch) { return branchNext(pos, node, inUnit); } else if(node<kMinValueLead) { // Match the first of length+1 units. int length=node-kMinLinearMatch; // Actual match length minus 1. 
if(inUnit==chars_.charAt(pos++)) { remainingMatchLength_=--length; pos_=pos; return (length<0 && (node=chars_.charAt(pos))>=kMinValueLead) ? valueResults_[node>>15] : Result.NO_VALUE; } else { // No match. break; } } else if((node&kValueIsFinal)!=0) { // No further matching units. break; } else { // Skip intermediate value. pos=skipNodeValue(pos, node); node&=kNodeTypeMask; } } stop(); return Result.NO_MATCH; } // Helper functions for getUniqueValue(). // Recursively finds a unique value (or whether there is not a unique one) // from a branch. // uniqueValue: On input, same as for getUniqueValue()/findUniqueValue(). // On return, if not 0, then bits 63..33 contain the updated non-negative pos. private static long findUniqueValueFromBranch(CharSequence chars, int pos, int length, long uniqueValue) { while(length>kMaxBranchLinearSubNodeLength) { ++pos; // ignore the comparison unit uniqueValue=findUniqueValueFromBranch(chars, jumpByDelta(chars, pos), length>>1, uniqueValue); if(uniqueValue==0) { return 0; } length=length-(length>>1); pos=skipDelta(chars, pos); } do { ++pos; // ignore a comparison unit // handle its value int node=chars.charAt(pos++); boolean isFinal=(node&kValueIsFinal)!=0; node&=0x7fff; int value=readValue(chars, pos, node); pos=skipValue(pos, node); if(isFinal) { if(uniqueValue!=0) { if(value!=(int)(uniqueValue>>1)) { return 0; } } else { uniqueValue=((long)value<<1)|1; } } else { uniqueValue=findUniqueValue(chars, pos+value, uniqueValue); if(uniqueValue==0) { return 0; } } } while(--length>1); // ignore the last comparison byte return ((long)(pos+1)<<33)|(uniqueValue&0x1ffffffffL); } // Recursively finds a unique value (or whether there is not a unique one) // starting from a position on a node lead unit. // uniqueValue: If there is one, then bits 32..1 contain the value and bit 0 is set. // Otherwise, uniqueValue is 0. Bits 63..33 are ignored. 
private static long findUniqueValue(CharSequence chars, int pos, long uniqueValue) { int node=chars.charAt(pos++); for(;;) { if(node<kMinLinearMatch) { if(node==0) { node=chars.charAt(pos++); } uniqueValue=findUniqueValueFromBranch(chars, pos, node+1, uniqueValue); if(uniqueValue==0) { return 0; } pos=(int)(uniqueValue>>>33); node=chars.charAt(pos++); } else if(node<kMinValueLead) { // linear-match node pos+=node-kMinLinearMatch+1; // Ignore the match units. node=chars.charAt(pos++); } else { boolean isFinal=(node&kValueIsFinal)!=0; int value; if(isFinal) { value=readValue(chars, pos, node&0x7fff); } else { value=readNodeValue(chars, pos, node); } if(uniqueValue!=0) { if(value!=(int)(uniqueValue>>1)) { return 0; } } else { uniqueValue=((long)value<<1)|1; } if(isFinal) { return uniqueValue; } pos=skipNodeValue(pos, node); node&=kNodeTypeMask; } } } // Helper functions for getNextChars(). // getNextChars() when pos is on a branch node. private static void getNextBranchChars(CharSequence chars, int pos, int length, Appendable out) { while(length>kMaxBranchLinearSubNodeLength) { ++pos; // ignore the comparison unit getNextBranchChars(chars, jumpByDelta(chars, pos), length>>1, out); length=length-(length>>1); pos=skipDelta(chars, pos); } do { append(out, chars.charAt(pos++)); pos=skipValue(chars, pos); } while(--length>1); append(out, chars.charAt(pos)); } private static void append(Appendable out, int c) { try { out.append((char)c); } catch(IOException e) { throw new ICUUncheckedIOException(e); } } // CharsTrie data structure // // The trie consists of a series of char-serialized nodes for incremental // Unicode string/char sequence matching. (char=16-bit unsigned integer) // The root node is at the beginning of the trie data. // // Types of nodes are distinguished by their node lead unit ranges. // After each node, except a final-value node, another node follows to // encode match values or continue matching further units. 
// // Node types: // - Final-value node: Stores a 32-bit integer in a compact, variable-length format. // The value is for the string/char sequence so far. // - Match node, optionally with an intermediate value in a different compact format. // The value, if present, is for the string/char sequence so far. // // Aside from the value, which uses the node lead unit's high bits: // // - Linear-match node: Matches a number of units. // - Branch node: Branches to other nodes according to the current input unit. // The node unit is the length of the branch (number of units to select from) // minus 1. It is followed by a sub-node: // - If the length is at most kMaxBranchLinearSubNodeLength, then // there are length-1 (key, value) pairs and then one more comparison unit. // If one of the key units matches, then the value is either a final value for // the string so far, or a "jump" delta to the next node. // If the last unit matches, then matching continues with the next node. // (Values have the same encoding as final-value nodes.) // - If the length is greater than kMaxBranchLinearSubNodeLength, then // there is one unit and one "jump" delta. // If the input unit is less than the sub-node unit, then "jump" by delta to // the next sub-node which will have a length of length/2. // (The delta has its own compact encoding.) // Otherwise, skip the "jump" delta to the next sub-node // which will have a length of length-length/2. // Match-node lead unit values, after masking off intermediate-value bits: // 0000..002f: Branch node. If node!=0 then the length is node+1, otherwise // the length is one more than the next unit. // For a branch sub-node with at most this many entries, we drop down // to a linear search. /*package*/ static final int kMaxBranchLinearSubNodeLength=5; // 0030..003f: Linear-match node, match 1..16 units and continue reading the next node. 
/*package*/ static final int kMinLinearMatch=0x30; /*package*/ static final int kMaxLinearMatchLength=0x10; // Match-node lead unit bits 14..6 for the optional intermediate value. // If these bits are 0, then there is no intermediate value. // Otherwise, see the *NodeValue* constants below. /*package*/ static final int kMinValueLead=kMinLinearMatch+kMaxLinearMatchLength; // 0x0040 /*package*/ static final int kNodeTypeMask=kMinValueLead-1; // 0x003f // A final-value node has bit 15 set. /*package*/ static final int kValueIsFinal=0x8000; // Compact value: After testing and masking off bit 15, use the following thresholds. /*package*/ static final int kMaxOneUnitValue=0x3fff; /*package*/ static final int kMinTwoUnitValueLead=kMaxOneUnitValue+1; // 0x4000 /*package*/ static final int kThreeUnitValueLead=0x7fff; /*package*/ static final int kMaxTwoUnitValue=((kThreeUnitValueLead-kMinTwoUnitValueLead)<<16)-1; // 0x3ffeffff // Compact intermediate-value integer, lead unit shared with a branch or linear-match node. /*package*/ static final int kMaxOneUnitNodeValue=0xff; /*package*/ static final int kMinTwoUnitNodeValueLead=kMinValueLead+((kMaxOneUnitNodeValue+1)<<6); // 0x4040 /*package*/ static final int kThreeUnitNodeValueLead=0x7fc0; /*package*/ static final int kMaxTwoUnitNodeValue= ((kThreeUnitNodeValueLead-kMinTwoUnitNodeValueLead)<<10)-1; // 0xfdffff // Compact delta integers. /*package*/ static final int kMaxOneUnitDelta=0xfbff; /*package*/ static final int kMinTwoUnitDeltaLead=kMaxOneUnitDelta+1; // 0xfc00 /*package*/ static final int kThreeUnitDeltaLead=0xffff; /*package*/ static final int kMaxTwoUnitDelta=((kThreeUnitDeltaLead-kMinTwoUnitDeltaLead)<<16)-1; // 0x03feffff // Fixed value referencing the CharsTrie words. private CharSequence chars_; private int root_; // Iterator variables. // Pointer to next trie unit to read. NULL if no more matches. private int pos_; // Remaining length of a linear-match node, minus 1. Negative if not in such a node. 
private int remainingMatchLength_; }
googleapis/google-cloud-java
37,856
java-cloudsecuritycompliance/proto-google-cloud-cloudsecuritycompliance-v1/src/main/java/com/google/cloud/cloudsecuritycompliance/v1/CreateFrameworkRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/cloudsecuritycompliance/v1/config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.cloudsecuritycompliance.v1; /** * * * <pre> * Request message for creating a Framework * </pre> * * Protobuf type {@code google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest} */ public final class CreateFrameworkRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) CreateFrameworkRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateFrameworkRequest.newBuilder() to construct. 
private CreateFrameworkRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateFrameworkRequest() { parent_ = ""; frameworkId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateFrameworkRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CreateFrameworkRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CreateFrameworkRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest.class, com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FRAMEWORK_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object frameworkId_ = ""; /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The frameworkId. */ @java.lang.Override public java.lang.String getFrameworkId() { java.lang.Object ref = frameworkId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); frameworkId_ = s; return s; } } /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for frameworkId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFrameworkIdBytes() { java.lang.Object ref = frameworkId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); frameworkId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FRAMEWORK_FIELD_NUMBER = 3; private com.google.cloud.cloudsecuritycompliance.v1.Framework framework_; /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the framework field is set. */ @java.lang.Override public boolean hasFramework() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The framework. */ @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.Framework getFramework() { return framework_ == null ? com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance() : framework_; } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder getFrameworkOrBuilder() { return framework_ == null ? 
com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance() : framework_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(frameworkId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, frameworkId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getFramework()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(frameworkId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, frameworkId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getFramework()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest)) { return super.equals(obj); } com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest other = (com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) obj; if (!getParent().equals(other.getParent())) return false; if 
(!getFrameworkId().equals(other.getFrameworkId())) return false; if (hasFramework() != other.hasFramework()) return false; if (hasFramework()) { if (!getFramework().equals(other.getFramework())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FRAMEWORK_ID_FIELD_NUMBER; hash = (53 * hash) + getFrameworkId().hashCode(); if (hasFramework()) { hash = (37 * hash) + FRAMEWORK_FIELD_NUMBER; hash = (53 * hash) + getFramework().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for creating a Framework * </pre> * * Protobuf type {@code google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CreateFrameworkRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CreateFrameworkRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest.class, com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest.Builder.class); } // Construct using // 
com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFrameworkFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; frameworkId_ = ""; framework_ = null; if (frameworkBuilder_ != null) { frameworkBuilder_.dispose(); frameworkBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.cloudsecuritycompliance.v1.ConfigProto .internal_static_google_cloud_cloudsecuritycompliance_v1_CreateFrameworkRequest_descriptor; } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest getDefaultInstanceForType() { return com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest build() { com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest buildPartial() { com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest result = new com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 
0x00000002) != 0)) { result.frameworkId_ = frameworkId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.framework_ = frameworkBuilder_ == null ? framework_ : frameworkBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) { return mergeFrom( (com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest other) { if (other == com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFrameworkId().isEmpty()) { frameworkId_ = other.frameworkId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasFramework()) { mergeFramework(other.getFramework()); } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { frameworkId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getFrameworkFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent resource name, in the format * `organizations/{organization}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object frameworkId_ = ""; /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The frameworkId. */ public java.lang.String getFrameworkId() { java.lang.Object ref = frameworkId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); frameworkId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for frameworkId. */ public com.google.protobuf.ByteString getFrameworkIdBytes() { java.lang.Object ref = frameworkId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); frameworkId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The frameworkId to set. * @return This builder for chaining. 
*/ public Builder setFrameworkId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } frameworkId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearFrameworkId() { frameworkId_ = getDefaultInstance().getFrameworkId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. ID of the framework. * This is not the full name of the framework. * This is the last part of the full name of the framework. * </pre> * * <code>string framework_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for frameworkId to set. * @return This builder for chaining. */ public Builder setFrameworkIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); frameworkId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.cloudsecuritycompliance.v1.Framework framework_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudsecuritycompliance.v1.Framework, com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder, com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder> frameworkBuilder_; /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the framework field is set. */ public boolean hasFramework() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. 
The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The framework. */ public com.google.cloud.cloudsecuritycompliance.v1.Framework getFramework() { if (frameworkBuilder_ == null) { return framework_ == null ? com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance() : framework_; } else { return frameworkBuilder_.getMessage(); } } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setFramework(com.google.cloud.cloudsecuritycompliance.v1.Framework value) { if (frameworkBuilder_ == null) { if (value == null) { throw new NullPointerException(); } framework_ = value; } else { frameworkBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setFramework( com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder builderForValue) { if (frameworkBuilder_ == null) { framework_ = builderForValue.build(); } else { frameworkBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. 
The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeFramework(com.google.cloud.cloudsecuritycompliance.v1.Framework value) { if (frameworkBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && framework_ != null && framework_ != com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance()) { getFrameworkBuilder().mergeFrom(value); } else { framework_ = value; } } else { frameworkBuilder_.mergeFrom(value); } if (framework_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearFramework() { bitField0_ = (bitField0_ & ~0x00000004); framework_ = null; if (frameworkBuilder_ != null) { frameworkBuilder_.dispose(); frameworkBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder getFrameworkBuilder() { bitField0_ |= 0x00000004; onChanged(); return getFrameworkFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder getFrameworkOrBuilder() { if (frameworkBuilder_ != null) { return frameworkBuilder_.getMessageOrBuilder(); } else { return framework_ == null ? com.google.cloud.cloudsecuritycompliance.v1.Framework.getDefaultInstance() : framework_; } } /** * * * <pre> * Required. 
The resource being created * </pre> * * <code> * .google.cloud.cloudsecuritycompliance.v1.Framework framework = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudsecuritycompliance.v1.Framework, com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder, com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder> getFrameworkFieldBuilder() { if (frameworkBuilder_ == null) { frameworkBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.cloudsecuritycompliance.v1.Framework, com.google.cloud.cloudsecuritycompliance.v1.Framework.Builder, com.google.cloud.cloudsecuritycompliance.v1.FrameworkOrBuilder>( getFramework(), getParentForChildren(), isClean()); framework_ = null; } return frameworkBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) } // @@protoc_insertion_point(class_scope:google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest) private static final com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest(); } public static com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateFrameworkRequest> PARSER = new com.google.protobuf.AbstractParser<CreateFrameworkRequest>() { @java.lang.Override public CreateFrameworkRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateFrameworkRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateFrameworkRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/pulsar
38,005
managed-ledger/src/test/java/org/apache/bookkeeper/mledger/impl/NonDurableCursorTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.bookkeeper.mledger.impl; import static java.nio.charset.StandardCharsets.UTF_8; import static org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl.ENTRIES_ADDED_COUNTER_UPDATER; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import com.google.common.collect.Iterables; import io.netty.buffer.ByteBuf; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import lombok.Cleanup; import org.apache.bookkeeper.mledger.AsyncCallbacks; import org.apache.bookkeeper.mledger.AsyncCallbacks.AddEntryCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.MarkDeleteCallback; import org.apache.bookkeeper.mledger.Entry; import org.apache.bookkeeper.mledger.ManagedCursor; 
import org.apache.bookkeeper.mledger.ManagedLedger; import org.apache.bookkeeper.mledger.ManagedLedgerConfig; import org.apache.bookkeeper.mledger.ManagedLedgerException; import org.apache.bookkeeper.mledger.ManagedLedgerFactory; import org.apache.bookkeeper.mledger.ManagedLedgerFactoryConfig; import org.apache.bookkeeper.mledger.Position; import org.apache.bookkeeper.mledger.PositionFactory; import org.apache.bookkeeper.test.MockedBookKeeperTestCase; import org.apache.commons.lang3.tuple.Pair; import org.apache.pulsar.common.api.proto.CommandSubscribe; import org.awaitility.Awaitility; import org.mockito.Mockito; import org.mockito.stubbing.Answer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.Assert; import org.testng.annotations.Test; public class NonDurableCursorTest extends MockedBookKeeperTestCase { private static final Charset Encoding = StandardCharsets.UTF_8; @Test(timeOut = 20000) void readFromEmptyLedger() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger"); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.EARLIEST); List<Entry> entries = c1.readEntries(10); assertEquals(entries.size(), 0); entries.forEach(Entry::release); ledger.addEntry("test".getBytes(Encoding)); entries = c1.readEntries(10); assertEquals(entries.size(), 1); entries.forEach(Entry::release); entries = c1.readEntries(10); assertEquals(entries.size(), 0); entries.forEach(Entry::release); // Test string representation assertEquals(c1.toString(), "NonDurableCursorImpl{ledger=my_test_ledger, cursor=" + c1.getName() + ", ackPos=3:-1, readPos=3:1}"); } @Test(timeOut = 20000) void testOpenNonDurableCursorAtNonExistentMessageId() throws Exception { ManagedLedger ledger = factory.open("non_durable_cursor_at_non_existent_msgid"); ManagedLedgerImpl mlImpl = (ManagedLedgerImpl) ledger; Position position = mlImpl.getLastPosition(); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.create( position.getLedgerId(), 
position.getEntryId() - 1 )); assertEquals(c1.getReadPosition(), PositionFactory.create( position.getLedgerId(), 0 )); c1.close(); ledger.close(); } @Test(timeOut = 20000) void testOpenNonDurableCursorWhileLedgerIsAddingFirstEntryAfterTrimmed() throws Exception { ManagedLedgerConfig config = new ManagedLedgerConfig().setMaxEntriesPerLedger(1) .setRetentionTime(0, TimeUnit.MILLISECONDS); config.setMinimumRolloverTime(0, TimeUnit.MILLISECONDS); @Cleanup ManagedLedgerImpl ledgerSpy = Mockito.spy((ManagedLedgerImpl) factory.open("non_durable_cursor_while_ledger_trimmed", config)); ledgerSpy.addEntry("message1".getBytes()); ledgerSpy.rollCurrentLedgerIfFull(); Awaitility.waitAtMost(5, TimeUnit.SECONDS).until(() -> ledgerSpy.getLedgersInfoAsList().size() > 1 ); CompletableFuture<Void> trimFuture = new CompletableFuture<>(); ledgerSpy.trimConsumedLedgersInBackground(trimFuture); trimFuture.join(); // Use (currentLedgerId, -1) as startCursorPosition after ledger was trimmed Position startCursorPosition = PositionFactory.create(ledgerSpy.getCurrentLedger().getId(), -1); assertTrue(startCursorPosition.compareTo(ledgerSpy.lastConfirmedEntry) > 0); CountDownLatch getLastPositionLatch = new CountDownLatch(1); CountDownLatch newNonDurableCursorLatch = new CountDownLatch(1); Mockito.when(ledgerSpy.getLastPositionAndCounter()).then((Answer<Pair<Position, Long>>) invocation -> { newNonDurableCursorLatch.countDown(); getLastPositionLatch.await(); return Pair.of(ledgerSpy.lastConfirmedEntry, ENTRIES_ADDED_COUNTER_UPDATER.get(ledgerSpy)); }); CompletableFuture<ManagedCursor> cursorFuture = new CompletableFuture<ManagedCursor>() .completeAsync(() -> new NonDurableCursorImpl(bkc, ledgerSpy, "my_test_cursor", startCursorPosition, CommandSubscribe.InitialPosition.Latest, false) ); Position oldLastConfirmedEntry = ledgerSpy.lastConfirmedEntry; // Wait until NonDurableCursorImpl constructor invokes ManagedLedgerImpl.getLastPositionAndCounter newNonDurableCursorLatch.await(); // Add first 
entry after ledger was trimmed ledgerSpy.addEntry("message2".getBytes()); assertTrue(oldLastConfirmedEntry.compareTo(ledgerSpy.lastConfirmedEntry) < 0); // Unblock NonDurableCursorImpl constructor getLastPositionLatch.countDown(); // cursor should read from lastConfirmedEntry ManagedCursor cursor = cursorFuture.join(); assertEquals(cursor.getReadPosition(), ledgerSpy.lastConfirmedEntry); } @Test(timeOut = 20000) void testZNodeBypassed() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger"); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.EARLIEST); assertTrue(ledger.getCursors().iterator().hasNext()); c1.close(); ledger.close(); // Re-open ManagedLedger ledger2 = factory.open("my_test_ledger"); assertTrue(!ledger2.getCursors().iterator().hasNext()); } @Test(timeOut = 20000) void readTwice() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.LATEST); ManagedCursor c2 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("entry-1".getBytes(Encoding)); ledger.addEntry("entry-2".getBytes(Encoding)); List<Entry> entries = c1.readEntries(2); assertEquals(entries.size(), 2); entries.forEach(Entry::release); entries = c1.readEntries(2); assertEquals(entries.size(), 0); entries.forEach(Entry::release); entries = c2.readEntries(2); assertEquals(entries.size(), 2); entries.forEach(Entry::release); entries = c2.readEntries(2); assertEquals(entries.size(), 0); entries.forEach(Entry::release); } @Test(timeOut = 20000) void readWithCacheDisabled() throws Exception { ManagedLedgerFactoryConfig config = new ManagedLedgerFactoryConfig(); config.setMaxCacheSize(0); @Cleanup("shutdown") ManagedLedgerFactoryImpl factory = new ManagedLedgerFactoryImpl(metadataStore, bkc, config); ManagedLedger ledger = factory.open("my_test_ledger", new 
ManagedLedgerConfig().setMaxEntriesPerLedger(1) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.LATEST); ManagedCursor c2 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("entry-1".getBytes(Encoding)); ledger.addEntry("entry-2".getBytes(Encoding)); List<Entry> entries = c1.readEntries(2); assertEquals(entries.size(), 2); assertEquals(new String(entries.get(0).getData(), Encoding), "entry-1"); assertEquals(new String(entries.get(1).getData(), Encoding), "entry-2"); entries.forEach(Entry::release); entries = c1.readEntries(2); assertEquals(entries.size(), 0); entries.forEach(Entry::release); entries = c2.readEntries(2); assertEquals(entries.size(), 2); entries.forEach(Entry::release); entries = c2.readEntries(2); assertEquals(entries.size(), 0); entries.forEach(Entry::release); } @Test(timeOut = 20000) void readFromClosedLedger() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(1) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.close(); try { c1.readEntries(2); fail("ledger is closed, should fail"); } catch (ManagedLedgerException e) { // ok } } @Test(timeOut = 20000) void testNumberOfEntries() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(2) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-1".getBytes(Encoding)); ManagedCursor c2 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-2".getBytes(Encoding)); ManagedCursor c3 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-3".getBytes(Encoding)); ManagedCursor c4 = 
ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-4".getBytes(Encoding)); ManagedCursor c5 = ledger.newNonDurableCursor(PositionFactory.LATEST); assertEquals(c1.getNumberOfEntries(), 4); assertTrue(c1.hasMoreEntries()); assertEquals(c2.getNumberOfEntries(), 3); assertTrue(c2.hasMoreEntries()); assertEquals(c3.getNumberOfEntries(), 2); assertTrue(c3.hasMoreEntries()); assertEquals(c4.getNumberOfEntries(), 1); assertTrue(c4.hasMoreEntries()); assertEquals(c5.getNumberOfEntries(), 0); assertFalse(c5.hasMoreEntries()); List<Entry> entries = c1.readEntries(2); assertEquals(entries.size(), 2); c1.markDelete(entries.get(1).getPosition()); assertEquals(c1.getNumberOfEntries(), 2); entries.forEach(Entry::release); } @Test(timeOut = 20000) void testNumberOfEntriesInBacklog() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(2) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.LATEST); Position p1 = ledger.addEntry("dummy-entry-1".getBytes(Encoding)); ManagedCursor c2 = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-2".getBytes(Encoding)); ManagedCursor c3 = ledger.newNonDurableCursor(PositionFactory.LATEST); Position p3 = ledger.addEntry("dummy-entry-3".getBytes(Encoding)); ManagedCursor c4 = ledger.newNonDurableCursor(PositionFactory.LATEST); Position p4 = ledger.addEntry("dummy-entry-4".getBytes(Encoding)); ManagedCursor c5 = ledger.newNonDurableCursor(PositionFactory.LATEST); assertEquals(c1.getNumberOfEntriesInBacklog(false), 4); assertEquals(c2.getNumberOfEntriesInBacklog(false), 3); assertEquals(c3.getNumberOfEntriesInBacklog(false), 2); assertEquals(c4.getNumberOfEntriesInBacklog(false), 1); assertEquals(c5.getNumberOfEntriesInBacklog(false), 0); List<Entry> entries = c1.readEntries(2); assertEquals(entries.size(), 2); entries.forEach(Entry::release); 
assertEquals(c1.getNumberOfEntries(), 2); assertEquals(c1.getNumberOfEntriesInBacklog(false), 4); c1.markDelete(p1); assertEquals(c1.getNumberOfEntries(), 2); assertEquals(c1.getNumberOfEntriesInBacklog(false), 3); c1.delete(p3); assertEquals(c1.getNumberOfEntries(), 1); assertEquals(c1.getNumberOfEntriesInBacklog(false), 2); c1.markDelete(p4); assertEquals(c1.getNumberOfEntries(), 0); assertEquals(c1.getNumberOfEntriesInBacklog(false), 0); } @Test(timeOut = 20000) void markDeleteWithErrors() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger"); ManagedCursor cursor = ledger.openCursor("c1"); ledger.addEntry("dummy-entry-1".getBytes(Encoding)); ledger.addEntry("dummy-entry-2".getBytes(Encoding)); List<Entry> entries = cursor.readEntries(100); assertEquals(entries.size(), 2); cursor.markDelete(entries.get(0).getPosition()); stopBookKeeper(); // Mark-delete should succeed if BK is down cursor.markDelete(entries.get(1).getPosition()); entries.forEach(Entry::release); } @Test(timeOut = 20000) void markDeleteAcrossLedgers() throws Exception { ManagedLedger ml1 = factory.open("my_test_ledger"); ManagedCursor mc1 = ml1.openCursor("c1"); // open ledger id 3 for ml1 // markDeletePosition for mc1 is 3:-1 // readPosition is 3:0 ml1.close(); mc1.close(); // force removal of this ledger from the cache factory.close(ml1); ManagedLedger ml2 = factory.open("my_test_ledger"); ManagedCursor mc2 = ml2.openCursor("c1"); // open ledger id 5 for ml2 // this entry is written at 5:0 Position pos = ml2.addEntry("dummy-entry-1".getBytes(Encoding)); List<Entry> entries = mc2.readEntries(1); assertEquals(entries.size(), 1); assertEquals(new String(entries.get(0).getData(), Encoding), "dummy-entry-1"); entries.forEach(Entry::release); mc2.delete(pos); // verify if the markDeletePosition moves from 3:-1 to 5:0 assertEquals(mc2.getMarkDeletedPosition(), pos); assertEquals(mc2.getMarkDeletedPosition().getNext(), mc2.getReadPosition()); } @Test(timeOut = 20000) void 
markDeleteGreaterThanLastConfirmedEntry() throws Exception { ManagedLedger ml1 = factory.open("my_test_ledger"); ManagedCursor mc1 = ml1.newNonDurableCursor(PositionFactory.create(Long.MAX_VALUE - 1, Long.MAX_VALUE - 1)); assertEquals(mc1.getMarkDeletedPosition(), ml1.getLastConfirmedEntry()); } @Test(timeOut = 20000) void testResetCursor() throws Exception { ManagedLedger ledger = factory.open("my_test_move_cursor_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(10)); ManagedCursor cursor = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-1".getBytes(Encoding)); ledger.addEntry("dummy-entry-2".getBytes(Encoding)); ledger.addEntry("dummy-entry-3".getBytes(Encoding)); Position lastPosition = ledger.addEntry("dummy-entry-4".getBytes(Encoding)); final AtomicBoolean moveStatus = new AtomicBoolean(false); Position resetPosition = PositionFactory.create(lastPosition.getLedgerId(), lastPosition.getEntryId() - 2); try { cursor.resetCursor(resetPosition); moveStatus.set(true); } catch (Exception e) { log.warn("error in reset cursor", e.getCause()); } assertTrue(moveStatus.get()); assertEquals(resetPosition, cursor.getReadPosition()); cursor.close(); ledger.close(); } @Test(timeOut = 20000) void testasyncResetCursor() throws Exception { ManagedLedger ledger = factory.open("my_test_move_cursor_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(10)); ManagedCursor cursor = ledger.newNonDurableCursor(PositionFactory.LATEST); ledger.addEntry("dummy-entry-1".getBytes(Encoding)); ledger.addEntry("dummy-entry-2".getBytes(Encoding)); ledger.addEntry("dummy-entry-3".getBytes(Encoding)); Position lastPosition = ledger.addEntry("dummy-entry-4".getBytes(Encoding)); final AtomicBoolean moveStatus = new AtomicBoolean(false); CountDownLatch countDownLatch = new CountDownLatch(1); Position resetPosition = PositionFactory.create(lastPosition.getLedgerId(), lastPosition.getEntryId() - 2); cursor.asyncResetCursor(resetPosition, false, new 
AsyncCallbacks.ResetCursorCallback() { @Override public void resetComplete(Object ctx) { moveStatus.set(true); countDownLatch.countDown(); } @Override public void resetFailed(ManagedLedgerException exception, Object ctx) { moveStatus.set(false); countDownLatch.countDown(); } }); countDownLatch.await(); assertTrue(moveStatus.get()); assertEquals(resetPosition, cursor.getReadPosition()); cursor.close(); ledger.close(); } @Test(timeOut = 20000) void rewind() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(2) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.EARLIEST); Position p1 = ledger.addEntry("dummy-entry-1".getBytes(Encoding)); Position p2 = ledger.addEntry("dummy-entry-2".getBytes(Encoding)); Position p3 = ledger.addEntry("dummy-entry-3".getBytes(Encoding)); Position p4 = ledger.addEntry("dummy-entry-4".getBytes(Encoding)); log.debug("p1: {}", p1); log.debug("p2: {}", p2); log.debug("p3: {}", p3); log.debug("p4: {}", p4); assertEquals(c1.getNumberOfEntries(), 4); assertEquals(c1.getNumberOfEntriesInBacklog(false), 4); c1.markDelete(p1); assertEquals(c1.getNumberOfEntries(), 3); assertEquals(c1.getNumberOfEntriesInBacklog(false), 3); List<Entry> entries = c1.readEntries(10); assertEquals(entries.size(), 3); entries.forEach(Entry::release); assertEquals(c1.getNumberOfEntries(), 0); assertEquals(c1.getNumberOfEntriesInBacklog(false), 3); c1.rewind(); assertEquals(c1.getNumberOfEntries(), 3); assertEquals(c1.getNumberOfEntriesInBacklog(false), 3); c1.markDelete(p2); assertEquals(c1.getNumberOfEntries(), 2); assertEquals(c1.getNumberOfEntriesInBacklog(false), 2); entries = c1.readEntries(10); assertEquals(entries.size(), 2); entries.forEach(Entry::release); assertEquals(c1.getNumberOfEntries(), 0); assertEquals(c1.getNumberOfEntriesInBacklog(false), 2); c1.rewind(); assertEquals(c1.getNumberOfEntries(), 2); 
c1.markDelete(p4); assertEquals(c1.getNumberOfEntries(), 0); assertEquals(c1.getNumberOfEntriesInBacklog(false), 0); c1.rewind(); assertEquals(c1.getNumberOfEntries(), 0); ledger.addEntry("dummy-entry-5".getBytes(Encoding)); assertEquals(c1.getNumberOfEntries(), 1); ledger.addEntry("dummy-entry-6".getBytes(Encoding)); assertEquals(c1.getNumberOfEntries(), 2); } @Test(timeOut = 20000) void markDeleteSkippingMessage() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(10)); ManagedCursor cursor = ledger.newNonDurableCursor(PositionFactory.EARLIEST); Position p1 = ledger.addEntry("dummy-entry-1".getBytes(Encoding)); Position p2 = ledger.addEntry("dummy-entry-2".getBytes(Encoding)); ledger.addEntry("dummy-entry-3".getBytes(Encoding)); Position p4 = ledger.addEntry("dummy-entry-4".getBytes(Encoding)); assertEquals(cursor.getNumberOfEntries(), 4); cursor.markDelete(p1); assertTrue(cursor.hasMoreEntries()); assertEquals(cursor.getNumberOfEntries(), 3); assertEquals(cursor.getReadPosition(), p2); List<Entry> entries = cursor.readEntries(1); assertEquals(entries.size(), 1); assertEquals(new String(entries.get(0).getData(), Encoding), "dummy-entry-2"); entries.forEach(Entry::release); cursor.markDelete(p4); assertFalse(cursor.hasMoreEntries()); assertEquals(cursor.getNumberOfEntries(), 0); assertEquals(cursor.getReadPosition(), PositionFactory.create(p4.getLedgerId(), p4.getEntryId() + 1)); } @Test(timeOut = 20000) public void asyncMarkDeleteBlocking() throws Exception { ManagedLedgerConfig config = new ManagedLedgerConfig(); config.setMaxEntriesPerLedger(10); config.setMetadataMaxEntriesPerLedger(5); ManagedLedger ledger = factory.open("my_test_ledger", config); final ManagedCursor c1 = ledger.openCursor("c1"); final AtomicReference<Position> lastPosition = new AtomicReference<Position>(); final int num = 100; final CountDownLatch latch = new CountDownLatch(num); for (int i = 0; i < num; i++) { 
ledger.asyncAddEntry("entry".getBytes(Encoding), new AddEntryCallback() { @Override public void addFailed(ManagedLedgerException exception, Object ctx) { } @Override public void addComplete(Position position, ByteBuf entryData, Object ctx) { lastPosition.set(position); c1.asyncMarkDelete(position, new MarkDeleteCallback() { @Override public void markDeleteFailed(ManagedLedgerException exception, Object ctx) { } @Override public void markDeleteComplete(Object ctx) { latch.countDown(); } }, null); } }, null); } latch.await(); assertEquals(c1.getNumberOfEntries(), 0); // Reopen @Cleanup("shutdown") ManagedLedgerFactory factory2 = new ManagedLedgerFactoryImpl(metadataStore, bkc); ledger = factory2.open("my_test_ledger"); ManagedCursor c2 = ledger.openCursor("c1"); assertEquals(c2.getMarkDeletedPosition(), lastPosition.get()); } @Test(timeOut = 20000) void unorderedMarkDelete() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger"); final ManagedCursor c1 = ledger.openCursor("c1"); Position p1 = ledger.addEntry("entry-1".getBytes(Encoding)); Position p2 = ledger.addEntry("entry-2".getBytes(Encoding)); c1.markDelete(p2); try { c1.markDelete(p1); fail("Should have thrown exception"); } catch (ManagedLedgerException e) { // ok } assertEquals(c1.getMarkDeletedPosition(), p2); } @Test(timeOut = 20000) void testSingleDelete() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(3) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); ManagedCursor cursor = ledger.newNonDurableCursor(PositionFactory.LATEST); Position p1 = ledger.addEntry("entry1".getBytes()); Position p2 = ledger.addEntry("entry2".getBytes()); Position p3 = ledger.addEntry("entry3".getBytes()); Position p4 = ledger.addEntry("entry4".getBytes()); Position p5 = ledger.addEntry("entry5".getBytes()); Position p6 = ledger.addEntry("entry6".getBytes()); Position p0 = cursor.getMarkDeletedPosition(); cursor.delete(p4); 
assertEquals(cursor.getMarkDeletedPosition(), p0); cursor.delete(p1); assertEquals(cursor.getMarkDeletedPosition(), p1); cursor.delete(p3); // Delete will silently succeed cursor.delete(p3); assertEquals(cursor.getMarkDeletedPosition(), p1); cursor.delete(p2); assertEquals(cursor.getMarkDeletedPosition(), p4); cursor.delete(p5); assertEquals(cursor.getMarkDeletedPosition(), p5); cursor.close(); try { cursor.delete(p6); } catch (ManagedLedgerException e) { // Ok } } @Test(timeOut = 20000) void subscribeToEarliestPositionWithImmediateDeletion() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(1)); /* Position p1 = */ ledger.addEntry("entry-1".getBytes()); /* Position p2 = */ ledger.addEntry("entry-2".getBytes()); /* Position p3 = */ ledger.addEntry("entry-3".getBytes()); Thread.sleep(300); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.EARLIEST); assertEquals(c1.getReadPosition(), PositionFactory.create(6, 0)); assertEquals(c1.getMarkDeletedPosition(), PositionFactory.create(6, -1)); } @Test // (timeOut = 20000) void subscribeToEarliestPositionWithDeferredDeletion() throws Exception { ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(1) .setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); Position p1 = ledger.addEntry("entry-1".getBytes()); Position p2 = ledger.addEntry("entry-2".getBytes()); /* Position p3 = */ ledger.addEntry("entry-3".getBytes()); /* Position p4 = */ ledger.addEntry("entry-4".getBytes()); /* Position p5 = */ ledger.addEntry("entry-5".getBytes()); /* Position p6 = */ ledger.addEntry("entry-6".getBytes()); ManagedCursor c1 = ledger.newNonDurableCursor(PositionFactory.EARLIEST); assertEquals(c1.getReadPosition(), p1); assertEquals(c1.getMarkDeletedPosition(), PositionFactory.create(3, -1)); assertEquals(c1.getNumberOfEntries(), 6); assertEquals(c1.getNumberOfEntriesInBacklog(false), 6); ManagedCursor 
c2 = ledger.newNonDurableCursor(p1); assertEquals(c2.getReadPosition(), p2); assertEquals(c2.getMarkDeletedPosition(), p1); assertEquals(c2.getNumberOfEntries(), 5); assertEquals(c2.getNumberOfEntriesInBacklog(false), 5); } @Test void testCursorWithNameIsCachable() throws Exception { final String p1CursorName = "entry-1"; final String p2CursorName = "entry-2"; ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(1)); Position p1 = ledger.addEntry(p1CursorName.getBytes()); Position p2 = ledger.addEntry(p2CursorName.getBytes()); ManagedCursor c1 = ledger.newNonDurableCursor(p1, p1CursorName); ManagedCursor c2 = ledger.newNonDurableCursor(p1, p1CursorName); ManagedCursor c3 = ledger.newNonDurableCursor(p2, p2CursorName); ManagedCursor c4 = ledger.newNonDurableCursor(p2, p2CursorName); assertEquals(c1, c2); assertEquals(c3, c4); assertNotEquals(c1, c3); assertNotEquals(c2, c3); assertNotEquals(c1, c4); assertNotEquals(c2, c4); assertNotNull(c1.getName()); assertNotNull(c2.getName()); assertNotNull(c3.getName()); assertNotNull(c4.getName()); ledger.close(); } @Test public void testGetSlowestConsumer() throws Exception { final String mlName = "test-get-slowest-consumer-ml"; final String c1 = "cursor1"; final String nc1 = "non-durable-cursor1"; final String ncEarliest = "non-durable-cursor-earliest"; ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open(mlName, new ManagedLedgerConfig()); Position p1 = ledger.addEntry(c1.getBytes(UTF_8)); log.info("write entry 1 : pos = {}", p1); Position p2 = ledger.addEntry(nc1.getBytes(UTF_8)); log.info("write entry 2 : pos = {}", p2); Position p3 = ledger.addEntry(nc1.getBytes(UTF_8)); log.info("write entry 3 : pos = {}", p3); ManagedCursor cursor1 = ledger.openCursor(c1); cursor1.seek(p3); assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); ManagedCursor nonCursor1 = ledger.newNonDurableCursor(p2, nc1); // The slowest reader should still be the durable cursor since 
non-durable readers are not taken into account assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); Position earliestPos = PositionFactory.create(-1, -2); ManagedCursor nonCursorEarliest = ledger.newNonDurableCursor(earliestPos, ncEarliest); // The slowest reader should still be the durable cursor since non-durable readers are not taken into account assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); // move non-durable cursor should NOT update the slowest reader position nonCursorEarliest.markDelete(p1); assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); nonCursorEarliest.markDelete(p2); assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); nonCursorEarliest.markDelete(p3); assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); nonCursor1.markDelete(p3); assertEquals(p3, ledger.getCursors().getSlowestCursorPosition()); ledger.close(); } @Test public void testBacklogStatsWhenDroppingData() throws Exception { ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("testBacklogStatsWhenDroppingData", new ManagedLedgerConfig().setMaxEntriesPerLedger(1)); ManagedCursor c1 = ledger.openCursor("c1"); ManagedCursor nonDurableCursor = ledger.newNonDurableCursor(PositionFactory.EARLIEST); assertEquals(nonDurableCursor.getNumberOfEntries(), 0); assertEquals(nonDurableCursor.getNumberOfEntriesInBacklog(true), 0); List<Position> positions = new ArrayList(); for (int i = 0; i < 10; i++) { positions.add(ledger.addEntry(("entry-" + i).getBytes(UTF_8))); } assertEquals(nonDurableCursor.getNumberOfEntries(), 10); assertEquals(nonDurableCursor.getNumberOfEntriesInBacklog(true), 10); c1.markDelete(positions.get(4)); assertEquals(c1.getNumberOfEntries(), 5); assertEquals(c1.getNumberOfEntriesInBacklog(true), 5); // Since the durable cursor has moved, the data will be trimmed CompletableFuture<Void> promise = new CompletableFuture<>(); ledger.internalTrimConsumedLedgers(promise); promise.join(); // The mark delete 
position has moved to position 4:1, and the ledger 4 only has one entry, // so the ledger 4 can be deleted. nonDurableCursor should has the same backlog with durable cursor. assertEquals(nonDurableCursor.getNumberOfEntries(), 5); assertEquals(nonDurableCursor.getNumberOfEntriesInBacklog(true), 5); c1.close(); ledger.deleteCursor(c1.getName()); promise = new CompletableFuture<>(); ledger.internalTrimConsumedLedgers(promise); promise.join(); assertEquals(nonDurableCursor.getNumberOfEntries(), 0); assertEquals(nonDurableCursor.getNumberOfEntriesInBacklog(true), 0); ledger.close(); } @Test public void testInvalidateReadHandleWithSlowNonDurableCursor() throws Exception { ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("testInvalidateReadHandleWithSlowNonDurableCursor", new ManagedLedgerConfig().setMaxEntriesPerLedger(1).setRetentionTime(-1, TimeUnit.SECONDS) .setRetentionSizeInMB(-1)); ManagedCursor c1 = ledger.openCursor("c1"); ManagedCursor nonDurableCursor = ledger.newNonDurableCursor(PositionFactory.EARLIEST); List<Position> positions = new ArrayList<>(); for (int i = 0; i < 10; i++) { positions.add(ledger.addEntry(("entry-" + i).getBytes(UTF_8))); } CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { ledger.asyncReadEntry(positions.get(i), new AsyncCallbacks.ReadEntryCallback() { @Override public void readEntryComplete(Entry entry, Object ctx) { latch.countDown(); } @Override public void readEntryFailed(ManagedLedgerException exception, Object ctx) { latch.countDown(); } }, null); } latch.await(); c1.markDelete(positions.get(4)); CompletableFuture<Void> promise = new CompletableFuture<>(); ledger.internalTrimConsumedLedgers(promise); promise.join(); Assert.assertTrue(ledger.ledgerCache.containsKey(positions.get(0).getLedgerId())); Assert.assertTrue(ledger.ledgerCache.containsKey(positions.get(1).getLedgerId())); Assert.assertTrue(ledger.ledgerCache.containsKey(positions.get(2).getLedgerId())); 
Assert.assertTrue(ledger.ledgerCache.containsKey(positions.get(3).getLedgerId())); Assert.assertTrue(ledger.ledgerCache.containsKey(positions.get(4).getLedgerId())); promise = new CompletableFuture<>(); nonDurableCursor.markDelete(positions.get(3)); ledger.internalTrimConsumedLedgers(promise); promise.join(); Assert.assertFalse(ledger.ledgerCache.containsKey(positions.get(0).getLedgerId())); Assert.assertFalse(ledger.ledgerCache.containsKey(positions.get(1).getLedgerId())); Assert.assertFalse(ledger.ledgerCache.containsKey(positions.get(2).getLedgerId())); Assert.assertFalse(ledger.ledgerCache.containsKey(positions.get(3).getLedgerId())); Assert.assertTrue(ledger.ledgerCache.containsKey(positions.get(4).getLedgerId())); ledger.close(); } @Test(expectedExceptions = NullPointerException.class) void testCursorWithNameIsNotNull() throws Exception { final String p1CursorName = "entry-1"; ManagedLedger ledger = factory.open("my_test_ledger", new ManagedLedgerConfig().setMaxEntriesPerLedger(1)); Position p1 = ledger.addEntry(p1CursorName.getBytes()); try { ledger.newNonDurableCursor(p1, null); } catch (NullPointerException npe) { assertEquals(npe.getMessage(), "cursor name can't be null"); throw npe; } finally { ledger.close(); } } @Test void deleteNonDurableCursorWithName() throws Exception { ManagedLedger ledger = factory.open("deleteManagedLedgerWithNonDurableCursor"); ManagedCursor c = ledger.newNonDurableCursor(PositionFactory.EARLIEST, "custom-name"); assertEquals(Iterables.size(ledger.getCursors()), 1); ledger.deleteCursor(c.getName()); assertEquals(Iterables.size(ledger.getCursors()), 0); } @Test public void testMessagesConsumedCounterInitializedCorrect() throws Exception { ManagedLedgerImpl ledger = (ManagedLedgerImpl) factory.open("testMessagesConsumedCounterInitializedCorrect", new ManagedLedgerConfig().setRetentionTime(1, TimeUnit.HOURS).setRetentionSizeInMB(1)); Position position = ledger.addEntry("1".getBytes(Encoding)); NonDurableCursorImpl cursor = 
(NonDurableCursorImpl) ledger.newNonDurableCursor(PositionFactory.EARLIEST); cursor.delete(position); assertEquals(cursor.getMessagesConsumedCounter(), 1); assertTrue(cursor.getMessagesConsumedCounter() <= ledger.getEntriesAddedCounter()); // cleanup. cursor.close(); ledger.close(); } private static final Logger log = LoggerFactory.getLogger(NonDurableCursorTest.class); }
googleapis/google-cloud-java
37,900
java-video-stitcher/proto-google-cloud-video-stitcher-v1/src/main/java/com/google/cloud/video/stitcher/v1/ListVodStitchDetailsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/video/stitcher/v1/video_stitcher_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.video.stitcher.v1; /** * * * <pre> * Response message for VideoStitcherService.listVodStitchDetails. * </pre> * * Protobuf type {@code google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse} */ public final class ListVodStitchDetailsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) ListVodStitchDetailsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListVodStitchDetailsResponse.newBuilder() to construct. 
private ListVodStitchDetailsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListVodStitchDetailsResponse() { vodStitchDetails_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListVodStitchDetailsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_ListVodStitchDetailsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_ListVodStitchDetailsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.class, com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.Builder.class); } public static final int VOD_STITCH_DETAILS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.video.stitcher.v1.VodStitchDetail> vodStitchDetails_; /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.video.stitcher.v1.VodStitchDetail> getVodStitchDetailsList() { return vodStitchDetails_; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder> getVodStitchDetailsOrBuilderList() { return vodStitchDetails_; } /** * * * <pre> * A List of stitch Details. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ @java.lang.Override public int getVodStitchDetailsCount() { return vodStitchDetails_.size(); } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ @java.lang.Override public com.google.cloud.video.stitcher.v1.VodStitchDetail getVodStitchDetails(int index) { return vodStitchDetails_.get(index); } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ @java.lang.Override public com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder getVodStitchDetailsOrBuilder( int index) { return vodStitchDetails_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The pagination token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The pagination token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < vodStitchDetails_.size(); i++) { output.writeMessage(1, vodStitchDetails_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < vodStitchDetails_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, vodStitchDetails_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse)) { return super.equals(obj); } com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse other = (com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) obj; if (!getVodStitchDetailsList().equals(other.getVodStitchDetailsList())) return 
false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getVodStitchDetailsCount() > 0) { hash = (37 * hash) + VOD_STITCH_DETAILS_FIELD_NUMBER; hash = (53 * hash) + getVodStitchDetailsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for VideoStitcherService.listVodStitchDetails. * </pre> * * Protobuf type {@code google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_ListVodStitchDetailsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_ListVodStitchDetailsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.class, com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.Builder.class); } // Construct using com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if 
(vodStitchDetailsBuilder_ == null) { vodStitchDetails_ = java.util.Collections.emptyList(); } else { vodStitchDetails_ = null; vodStitchDetailsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_ListVodStitchDetailsResponse_descriptor; } @java.lang.Override public com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse getDefaultInstanceForType() { return com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse build() { com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse buildPartial() { com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse result = new com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse result) { if (vodStitchDetailsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { vodStitchDetails_ = java.util.Collections.unmodifiableList(vodStitchDetails_); bitField0_ = (bitField0_ & ~0x00000001); } result.vodStitchDetails_ = vodStitchDetails_; } else { result.vodStitchDetails_ = vodStitchDetailsBuilder_.build(); } } private void buildPartial0( com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { 
result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) { return mergeFrom((com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse other) { if (other == com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse.getDefaultInstance()) return this; if (vodStitchDetailsBuilder_ == null) { if (!other.vodStitchDetails_.isEmpty()) { if (vodStitchDetails_.isEmpty()) { vodStitchDetails_ = other.vodStitchDetails_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureVodStitchDetailsIsMutable(); vodStitchDetails_.addAll(other.vodStitchDetails_); } onChanged(); } } else { if (!other.vodStitchDetails_.isEmpty()) { if (vodStitchDetailsBuilder_.isEmpty()) { vodStitchDetailsBuilder_.dispose(); vodStitchDetailsBuilder_ = null; vodStitchDetails_ = other.vodStitchDetails_; bitField0_ = 
(bitField0_ & ~0x00000001); vodStitchDetailsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getVodStitchDetailsFieldBuilder() : null; } else { vodStitchDetailsBuilder_.addAllMessages(other.vodStitchDetails_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.video.stitcher.v1.VodStitchDetail m = input.readMessage( com.google.cloud.video.stitcher.v1.VodStitchDetail.parser(), extensionRegistry); if (vodStitchDetailsBuilder_ == null) { ensureVodStitchDetailsIsMutable(); vodStitchDetails_.add(m); } else { vodStitchDetailsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.video.stitcher.v1.VodStitchDetail> vodStitchDetails_ = java.util.Collections.emptyList(); private void ensureVodStitchDetailsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { vodStitchDetails_ = new java.util.ArrayList<com.google.cloud.video.stitcher.v1.VodStitchDetail>( 
vodStitchDetails_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.video.stitcher.v1.VodStitchDetail, com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder, com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder> vodStitchDetailsBuilder_; /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public java.util.List<com.google.cloud.video.stitcher.v1.VodStitchDetail> getVodStitchDetailsList() { if (vodStitchDetailsBuilder_ == null) { return java.util.Collections.unmodifiableList(vodStitchDetails_); } else { return vodStitchDetailsBuilder_.getMessageList(); } } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public int getVodStitchDetailsCount() { if (vodStitchDetailsBuilder_ == null) { return vodStitchDetails_.size(); } else { return vodStitchDetailsBuilder_.getCount(); } } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public com.google.cloud.video.stitcher.v1.VodStitchDetail getVodStitchDetails(int index) { if (vodStitchDetailsBuilder_ == null) { return vodStitchDetails_.get(index); } else { return vodStitchDetailsBuilder_.getMessage(index); } } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder setVodStitchDetails( int index, com.google.cloud.video.stitcher.v1.VodStitchDetail value) { if (vodStitchDetailsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVodStitchDetailsIsMutable(); vodStitchDetails_.set(index, value); onChanged(); } else { vodStitchDetailsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A List of stitch Details. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder setVodStitchDetails( int index, com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder builderForValue) { if (vodStitchDetailsBuilder_ == null) { ensureVodStitchDetailsIsMutable(); vodStitchDetails_.set(index, builderForValue.build()); onChanged(); } else { vodStitchDetailsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder addVodStitchDetails(com.google.cloud.video.stitcher.v1.VodStitchDetail value) { if (vodStitchDetailsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVodStitchDetailsIsMutable(); vodStitchDetails_.add(value); onChanged(); } else { vodStitchDetailsBuilder_.addMessage(value); } return this; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder addVodStitchDetails( int index, com.google.cloud.video.stitcher.v1.VodStitchDetail value) { if (vodStitchDetailsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureVodStitchDetailsIsMutable(); vodStitchDetails_.add(index, value); onChanged(); } else { vodStitchDetailsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A List of stitch Details. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder addVodStitchDetails( com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder builderForValue) { if (vodStitchDetailsBuilder_ == null) { ensureVodStitchDetailsIsMutable(); vodStitchDetails_.add(builderForValue.build()); onChanged(); } else { vodStitchDetailsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder addVodStitchDetails( int index, com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder builderForValue) { if (vodStitchDetailsBuilder_ == null) { ensureVodStitchDetailsIsMutable(); vodStitchDetails_.add(index, builderForValue.build()); onChanged(); } else { vodStitchDetailsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder addAllVodStitchDetails( java.lang.Iterable<? extends com.google.cloud.video.stitcher.v1.VodStitchDetail> values) { if (vodStitchDetailsBuilder_ == null) { ensureVodStitchDetailsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, vodStitchDetails_); onChanged(); } else { vodStitchDetailsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder clearVodStitchDetails() { if (vodStitchDetailsBuilder_ == null) { vodStitchDetails_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { vodStitchDetailsBuilder_.clear(); } return this; } /** * * * <pre> * A List of stitch Details. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public Builder removeVodStitchDetails(int index) { if (vodStitchDetailsBuilder_ == null) { ensureVodStitchDetailsIsMutable(); vodStitchDetails_.remove(index); onChanged(); } else { vodStitchDetailsBuilder_.remove(index); } return this; } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder getVodStitchDetailsBuilder( int index) { return getVodStitchDetailsFieldBuilder().getBuilder(index); } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder getVodStitchDetailsOrBuilder( int index) { if (vodStitchDetailsBuilder_ == null) { return vodStitchDetails_.get(index); } else { return vodStitchDetailsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public java.util.List<? extends com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder> getVodStitchDetailsOrBuilderList() { if (vodStitchDetailsBuilder_ != null) { return vodStitchDetailsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(vodStitchDetails_); } } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder addVodStitchDetailsBuilder() { return getVodStitchDetailsFieldBuilder() .addBuilder(com.google.cloud.video.stitcher.v1.VodStitchDetail.getDefaultInstance()); } /** * * * <pre> * A List of stitch Details. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder addVodStitchDetailsBuilder( int index) { return getVodStitchDetailsFieldBuilder() .addBuilder( index, com.google.cloud.video.stitcher.v1.VodStitchDetail.getDefaultInstance()); } /** * * * <pre> * A List of stitch Details. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.VodStitchDetail vod_stitch_details = 1;</code> */ public java.util.List<com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder> getVodStitchDetailsBuilderList() { return getVodStitchDetailsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.video.stitcher.v1.VodStitchDetail, com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder, com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder> getVodStitchDetailsFieldBuilder() { if (vodStitchDetailsBuilder_ == null) { vodStitchDetailsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.video.stitcher.v1.VodStitchDetail, com.google.cloud.video.stitcher.v1.VodStitchDetail.Builder, com.google.cloud.video.stitcher.v1.VodStitchDetailOrBuilder>( vodStitchDetails_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); vodStitchDetails_ = null; } return vodStitchDetailsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The pagination token. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The pagination token. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The pagination token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The pagination token. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The pagination token. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse) private static final com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse(); } public static com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListVodStitchDetailsResponse> PARSER = new com.google.protobuf.AbstractParser<ListVodStitchDetailsResponse>() { @java.lang.Override public ListVodStitchDetailsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListVodStitchDetailsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListVodStitchDetailsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.video.stitcher.v1.ListVodStitchDetailsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,924
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/FetchFeatureValuesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/feature_online_store_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Request message for * [FeatureOnlineStoreService.FetchFeatureValues][google.cloud.aiplatform.v1.FeatureOnlineStoreService.FetchFeatureValues]. * All the features under the requested feature view will be returned. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.FetchFeatureValuesRequest} */ public final class FetchFeatureValuesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.FetchFeatureValuesRequest) FetchFeatureValuesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use FetchFeatureValuesRequest.newBuilder() to construct. 
private FetchFeatureValuesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FetchFeatureValuesRequest() { featureView_ = ""; dataFormat_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FetchFeatureValuesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_FetchFeatureValuesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_FetchFeatureValuesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.class, com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.Builder.class); } private int bitField0_; public static final int FEATURE_VIEW_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object featureView_ = ""; /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The featureView. */ @java.lang.Override public java.lang.String getFeatureView() { java.lang.Object ref = featureView_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); featureView_ = s; return s; } } /** * * * <pre> * Required. 
FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for featureView. */ @java.lang.Override public com.google.protobuf.ByteString getFeatureViewBytes() { java.lang.Object ref = featureView_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); featureView_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATA_KEY_FIELD_NUMBER = 6; private com.google.cloud.aiplatform.v1.FeatureViewDataKey dataKey_; /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the dataKey field is set. */ @java.lang.Override public boolean hasDataKey() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The dataKey. */ @java.lang.Override public com.google.cloud.aiplatform.v1.FeatureViewDataKey getDataKey() { return dataKey_ == null ? com.google.cloud.aiplatform.v1.FeatureViewDataKey.getDefaultInstance() : dataKey_; } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.FeatureViewDataKeyOrBuilder getDataKeyOrBuilder() { return dataKey_ == null ? 
com.google.cloud.aiplatform.v1.FeatureViewDataKey.getDefaultInstance() : dataKey_; } public static final int DATA_FORMAT_FIELD_NUMBER = 7; private int dataFormat_ = 0; /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for dataFormat. */ @java.lang.Override public int getDataFormatValue() { return dataFormat_; } /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The dataFormat. */ @java.lang.Override public com.google.cloud.aiplatform.v1.FeatureViewDataFormat getDataFormat() { com.google.cloud.aiplatform.v1.FeatureViewDataFormat result = com.google.cloud.aiplatform.v1.FeatureViewDataFormat.forNumber(dataFormat_); return result == null ? 
com.google.cloud.aiplatform.v1.FeatureViewDataFormat.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(featureView_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, featureView_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(6, getDataKey()); } if (dataFormat_ != com.google.cloud.aiplatform.v1.FeatureViewDataFormat.FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED .getNumber()) { output.writeEnum(7, dataFormat_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(featureView_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, featureView_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(6, getDataKey()); } if (dataFormat_ != com.google.cloud.aiplatform.v1.FeatureViewDataFormat.FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(7, dataFormat_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest other = (com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest) obj; if (!getFeatureView().equals(other.getFeatureView())) return false; if 
(hasDataKey() != other.hasDataKey()) return false; if (hasDataKey()) { if (!getDataKey().equals(other.getDataKey())) return false; } if (dataFormat_ != other.dataFormat_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + FEATURE_VIEW_FIELD_NUMBER; hash = (53 * hash) + getFeatureView().hashCode(); if (hasDataKey()) { hash = (37 * hash) + DATA_KEY_FIELD_NUMBER; hash = (53 * hash) + getDataKey().hashCode(); } hash = (37 * hash) + DATA_FORMAT_FIELD_NUMBER; hash = (53 * hash) + dataFormat_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public 
static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [FeatureOnlineStoreService.FetchFeatureValues][google.cloud.aiplatform.v1.FeatureOnlineStoreService.FetchFeatureValues]. * All the features under the requested feature view will be returned. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.FetchFeatureValuesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.FetchFeatureValuesRequest) com.google.cloud.aiplatform.v1.FetchFeatureValuesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_FetchFeatureValuesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_FetchFeatureValuesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.class, com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDataKeyFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; featureView_ = ""; dataKey_ = null; if (dataKeyBuilder_ != null) { dataKeyBuilder_.dispose(); dataKeyBuilder_ = null; } dataFormat_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_FetchFeatureValuesRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest build() { com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest buildPartial() { com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest result = new com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.featureView_ = featureView_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.dataKey_ = dataKeyBuilder_ == null ? 
dataKey_ : dataKeyBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.dataFormat_ = dataFormat_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest other) { if (other == com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest.getDefaultInstance()) return this; if (!other.getFeatureView().isEmpty()) { featureView_ = other.featureView_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasDataKey()) { mergeDataKey(other.getDataKey()); } if (other.dataFormat_ != 0) { setDataFormatValue(other.getDataFormatValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override 
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { featureView_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 50: { input.readMessage(getDataKeyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 50 case 56: { dataFormat_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 56 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object featureView_ = ""; /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The featureView. */ public java.lang.String getFeatureView() { java.lang.Object ref = featureView_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); featureView_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. 
FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for featureView. */ public com.google.protobuf.ByteString getFeatureViewBytes() { java.lang.Object ref = featureView_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); featureView_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The featureView to set. * @return This builder for chaining. */ public Builder setFeatureView(java.lang.String value) { if (value == null) { throw new NullPointerException(); } featureView_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearFeatureView() { featureView_ = getDefaultInstance().getFeatureView(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for featureView to set. * @return This builder for chaining. */ public Builder setFeatureViewBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); featureView_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.aiplatform.v1.FeatureViewDataKey dataKey_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.FeatureViewDataKey, com.google.cloud.aiplatform.v1.FeatureViewDataKey.Builder, com.google.cloud.aiplatform.v1.FeatureViewDataKeyOrBuilder> dataKeyBuilder_; /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the dataKey field is set. */ public boolean hasDataKey() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The dataKey. */ public com.google.cloud.aiplatform.v1.FeatureViewDataKey getDataKey() { if (dataKeyBuilder_ == null) { return dataKey_ == null ? com.google.cloud.aiplatform.v1.FeatureViewDataKey.getDefaultInstance() : dataKey_; } else { return dataKeyBuilder_.getMessage(); } } /** * * * <pre> * Optional. The request key to fetch feature values for. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setDataKey(com.google.cloud.aiplatform.v1.FeatureViewDataKey value) { if (dataKeyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } dataKey_ = value; } else { dataKeyBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setDataKey( com.google.cloud.aiplatform.v1.FeatureViewDataKey.Builder builderForValue) { if (dataKeyBuilder_ == null) { dataKey_ = builderForValue.build(); } else { dataKeyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeDataKey(com.google.cloud.aiplatform.v1.FeatureViewDataKey value) { if (dataKeyBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && dataKey_ != null && dataKey_ != com.google.cloud.aiplatform.v1.FeatureViewDataKey.getDefaultInstance()) { getDataKeyBuilder().mergeFrom(value); } else { dataKey_ = value; } } else { dataKeyBuilder_.mergeFrom(value); } if (dataKey_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. The request key to fetch feature values for. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearDataKey() { bitField0_ = (bitField0_ & ~0x00000002); dataKey_ = null; if (dataKeyBuilder_ != null) { dataKeyBuilder_.dispose(); dataKeyBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.cloud.aiplatform.v1.FeatureViewDataKey.Builder getDataKeyBuilder() { bitField0_ |= 0x00000002; onChanged(); return getDataKeyFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The request key to fetch feature values for. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.cloud.aiplatform.v1.FeatureViewDataKeyOrBuilder getDataKeyOrBuilder() { if (dataKeyBuilder_ != null) { return dataKeyBuilder_.getMessageOrBuilder(); } else { return dataKey_ == null ? com.google.cloud.aiplatform.v1.FeatureViewDataKey.getDefaultInstance() : dataKey_; } } /** * * * <pre> * Optional. The request key to fetch feature values for. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataKey data_key = 6 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.FeatureViewDataKey, com.google.cloud.aiplatform.v1.FeatureViewDataKey.Builder, com.google.cloud.aiplatform.v1.FeatureViewDataKeyOrBuilder> getDataKeyFieldBuilder() { if (dataKeyBuilder_ == null) { dataKeyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.FeatureViewDataKey, com.google.cloud.aiplatform.v1.FeatureViewDataKey.Builder, com.google.cloud.aiplatform.v1.FeatureViewDataKeyOrBuilder>( getDataKey(), getParentForChildren(), isClean()); dataKey_ = null; } return dataKeyBuilder_; } private int dataFormat_ = 0; /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for dataFormat. */ @java.lang.Override public int getDataFormatValue() { return dataFormat_; } /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The enum numeric value on the wire for dataFormat to set. * @return This builder for chaining. */ public Builder setDataFormatValue(int value) { dataFormat_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The dataFormat. */ @java.lang.Override public com.google.cloud.aiplatform.v1.FeatureViewDataFormat getDataFormat() { com.google.cloud.aiplatform.v1.FeatureViewDataFormat result = com.google.cloud.aiplatform.v1.FeatureViewDataFormat.forNumber(dataFormat_); return result == null ? com.google.cloud.aiplatform.v1.FeatureViewDataFormat.UNRECOGNIZED : result; } /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @param value The dataFormat to set. * @return This builder for chaining. */ public Builder setDataFormat(com.google.cloud.aiplatform.v1.FeatureViewDataFormat value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; dataFormat_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Optional. Response data format. If not set, * [FeatureViewDataFormat.KEY_VALUE][google.cloud.aiplatform.v1.FeatureViewDataFormat.KEY_VALUE] * will be used. * </pre> * * <code> * .google.cloud.aiplatform.v1.FeatureViewDataFormat data_format = 7 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return This builder for chaining. 
*/ public Builder clearDataFormat() { bitField0_ = (bitField0_ & ~0x00000004); dataFormat_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.FetchFeatureValuesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.FetchFeatureValuesRequest) private static final com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest(); } public static com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FetchFeatureValuesRequest> PARSER = new com.google.protobuf.AbstractParser<FetchFeatureValuesRequest>() { @java.lang.Override public FetchFeatureValuesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FetchFeatureValuesRequest> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<FetchFeatureValuesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.FetchFeatureValuesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/derby
36,893
java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/lang/ForeignKeysNonSpsTest.java
/* Derby - Class org.apache.derbyTesting.functionTests.tests.lang.ForeignKeysNonSpsTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.lang; import java.sql.CallableStatement; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import junit.framework.Test; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.TestConfiguration; /** * Test of foreign key constraints. Converted from the old harness test * lang/fk_nonSPS.sql */ public final class ForeignKeysNonSpsTest extends BaseJDBCTestCase { private static final int WAIT_TIMEOUT_DURATION = 4; /** * Public constructor required for running test as standalone JUnit. 
* @param name test name */ public ForeignKeysNonSpsTest (String name) { super(name); } /** * JUnit handle * @return this JUnit test */ public static Test suite() { return TestConfiguration.defaultSuite(ForeignKeysNonSpsTest.class); } public void testForeignKeys() throws Exception { ResultSet rs; PreparedStatement pSt; String [][] expRS; final Statement st = createStatement(); final int initialCardSysDepends = numberOfRowsInSysdepends(st); st.executeUpdate( "CREATE PROCEDURE WAIT_FOR_POST_COMMIT() DYNAMIC " + "RESULT SETS 0 LANGUAGE JAVA EXTERNAL NAME " + "'org.apache.derbyTesting.functionTests.util.T_Access" + ".waitForPostCommitToFinish' PARAMETER STYLE JAVA"); st.executeUpdate( " create table p (c1 char(1), y int not null, c2 " + "char(1), x int not null, constraint pk primary key (x,y))"); st.executeUpdate( " create table f (x int not null, s smallint, y int " + "not null, constraint fk foreign key (x,y) references p)"); st.executeUpdate("insert into p values ('1',1,'1',1)"); // should pass, foreign key constraint satisfied st.executeUpdate( "insert into f " + "values " + " (1,1,1)," + " (1,1,1)," + " (1,1,1), " + " (1,1,1)," + " (1, 0, 1)," + " (1,1,1)," + " (1,0,1)," + " (1, 0, 1)"); // should FAIL, foreign key constraint violated assertUpdateCount(st, 8, "delete from f"); assertStatementError("23503", st, " insert into f " + "values " + " (1,1,1)," + " (1,1,1)," + " (1,1,1), " + " (1,1,1)," + " (1, 1, 1)," + " (2,1,666)," + " (1,1,0)," + " (0, 1, 0)"); st.executeUpdate("drop table f"); waitForPostCommit(); // make sure boundary conditions are ok, null insert set st.executeUpdate( "create table f (c1 char(1), y int, c2 char(1), x " + "int, constraint fk foreign key (x,y) references p)"); st.executeUpdate("insert into f select * from p where 1 = 2"); st.executeUpdate("drop table f"); st.executeUpdate("drop table p"); waitForPostCommit(); // self referencing st.executeUpdate( "create table s (x int not null primary key, y int " + "references s, z int references 
s)"); // ok st.executeUpdate( "insert into s " + "values " + " (1,1,1)," + " (2,1,1)," + " (10,2,1), " + " (11,1,2)," + " (12,4,4)," + " (4,1,1)," + " (13,null,null)," + " (14,1,2)," + " (15,null, 1)"); assertUpdateCount(st, 9, "delete from s"); // bad assertStatementError("23503", st, "insert into s " + "values " + " (1,1,1)," + " (2,1,1)," + " (10,2,1), " + " (11,1,2)," + " (12,4,4)," + " (4,1,1)," + " (13,null,null)," + " (14,1,2)," + " (15,666, 1)"); // now a test for depenencies. the insert will create new // index conglomerate numbers, so we want to test that a // statement with a constraint check that is dependent on // the conglomerate number that is being changed is invalidated st.executeUpdate( "create table x (x int not null, y int, constraint " + "pk primary key (x))"); st.executeUpdate( " create table y (x int , y int, constraint fk " + "foreign key (x) references x)"); final PreparedStatement pStIx = prepareStatement( "insert into x values" + "(0,0)," + "(1,1)," + "(2,2)"); final PreparedStatement pStIx2 = prepareStatement( "insert into x values" + "(3,3),"+ "(4,4)"); final PreparedStatement pStIy = prepareStatement( "insert into y values" + "(0,0)," + "(1,1)," + "(2,2)"); final PreparedStatement dy = prepareStatement( "delete from y where x = 1"); final PreparedStatement dx = prepareStatement( "delete from x where x = 1"); assertUpdateCount(pStIx, 3); setAutoCommit(false); commit(); // ok assertUpdateCount(dy, 0); assertUpdateCount(dx, 1); // will fail, no key 1 in x assertStatementError("23503", pStIy); rollback(); assertUpdateCount(pStIy, 3); assertUpdateCount(dy, 1); assertUpdateCount(dx, 1); pStIx.close(); pStIx2.close(); pStIy.close(); dy.close(); dx.close(); st.executeUpdate("drop table y"); st.executeUpdate("drop table x"); st.executeUpdate("drop table s"); setAutoCommit(true); waitForPostCommit(); // ** insert fkddl.sql simple syntax checks column constraint st.executeUpdate( "create table p1 (x int not null, constraint pk1 " + "primary 
key(x))"); st.executeUpdate( " create table u1 (x int not null unique)"); // table constraint st.executeUpdate( "create table p2 (x int not null, y dec(5,2) not " + "null, constraint pk2 primary key (x,y))"); st.executeUpdate( " create table u2 (x int not null, y dec(5,2) not " + "null, constraint uk2 unique (x,y))"); st.executeUpdate( " create table p3 (x char(10) not null, constraint " + "pk3 primary key (x))"); // for future use st.executeUpdate("create schema otherschema"); st.executeUpdate( " create table otherschema.p1 (x int not null primary key)"); // Negative test cases for foreign key TABLE constraints // negative: fk table, no table assertStatementError("X0Y46", st, "create table f (x int, constraint fk foreign key " + "(x) references notthere)"); // negative: fk table, bad column assertStatementError("X0Y44", st, "create table f (x int, constraint fk foreign key " + "(x) references p1(notthere))"); // negative: fk table, no constraint assertStatementError("X0Y44", st, "create table f (x int, constraint fk foreign key " + "(x) references p2(y))"); // negative: fk table, wrong type assertStatementError("X0Y44", st, "create table f (x smallint, constraint fk foreign " + "key (x) references p1(x))"); // negative: cannot reference a system table assertStatementError("42Y08", st, "create table f (x char(36), constraint fk foreign " + "key (x) references sys.sysforeignkeys(constraintid))"); // negative: bad schema assertStatementError("42Y07", st, "create table f (x char(36), constraint fk foreign " + "key (x) references badschema.x)"); // negative: bad column list assertStatementError("42X93", st, "create table f (x dec(5,2), y int, constraint fk " + "foreign key (x,z) references p2(x,y))"); // negative: wrong number of columns assertStatementError("X0Y44", st, "create table f (x dec(5,2), y int, constraint fk " + "foreign key (x) references p2(x,y))"); assertStatementError("X0Y44", st, " create table f (x dec(5,2), y int, constraint fk " + "foreign key (x,y) 
references p2(x))"); // Negative test cases for foreign key COLUMN constraints // negative: fk column, no table assertStatementError("X0Y46", st, "create table f (x int references notthere)"); // negative: fk column, bad column assertStatementError("X0Y44", st, "create table f (x int references p1(notthere))"); // negative: fk column, no constraint assertStatementError("X0Y44", st, "create table f (x int references p2(y))"); // negative: fk column, wrong type assertStatementError("X0Y44", st, "create table f (x smallint references p1(x))"); // negative: cannot reference a system table assertStatementError("42Y08", st, "create table f (x char(36) references " + "sys.sysforeignkeys(constraintid))"); // negative: bad schema assertStatementError("42Y07", st, "create table f (x char(36) references badschema.x)"); // Some type checks. Types must match exactly ok st.executeUpdate( "create table f (d dec(5,2), i int, constraint fk " + "foreign key (i,d) references p2(x,y))"); st.executeUpdate( " drop table f"); waitForPostCommit(); st.executeUpdate( " create table f (i int, d dec(5,2), constraint fk " + "foreign key (i,d) references p2(x,y))"); st.executeUpdate("drop table f"); waitForPostCommit(); st.executeUpdate( " create table f (d dec(5,2), i int, constraint fk " + "foreign key (i,d) references u2(x,y))"); st.executeUpdate("drop table f"); waitForPostCommit(); st.executeUpdate( " create table f (i int, d dec(5,2), constraint fk " + "foreign key (i,d) references u2(x,y))"); st.executeUpdate("drop table f"); waitForPostCommit(); st.executeUpdate( " create table f (c char(10) references p3(x))"); st.executeUpdate("drop table f"); waitForPostCommit(); // type mismatch assertStatementError("X0Y44", st, "create table f (i int, d dec(5,1), constraint fk " + "foreign key (i,d) references p2(x,y))"); assertStatementError("X0Y44", st, " create table f (i int, d dec(4,2), constraint fk " + "foreign key (i,d) references p2(x,y))"); assertStatementError("X0Y44", st, " create table 
f (i int, d dec(4,2), constraint fk " + "foreign key (i,d) references p2(x,y))"); assertStatementError("X0Y44", st, " create table f (i int, d numeric(5,2), constraint " + "fk foreign key (i,d) references p2(x,y))"); assertStatementError("X0Y44", st, " create table f (c char(11) references p3(x))"); assertStatementError("X0Y44", st, " create table f (c varchar(10) references p3(x))"); // wrong order assertStatementError("X0Y44", st, "create table f (d dec(5,2), i int, constraint fk " + "foreign key (d,i) references p2(x,y))"); // check system tables st.executeUpdate( "create table f (x int, constraint fk foreign key " + "(x) references p1)"); rs = st.executeQuery( " select constraintname, referencecount " + " from sys.sysconstraints c, sys.sysforeignkeys fk" + " where fk.keyconstraintid = c.constraintid order by " + "constraintname"); expRS = new String [][]{{"PK1", "1"}}; JDBC.assertFullResultSet(rs, expRS, true); st.executeUpdate( " create table f2 (x int, constraint fk2 foreign key " + "(x) references p1(x))"); st.executeUpdate( " create table f3 (x int, constraint fk3 foreign key " + "(x) references p1(x))"); st.executeUpdate( " create table f4 (x int, constraint fk4 foreign key " + "(x) references p1(x))"); rs = st.executeQuery( " select distinct constraintname, referencecount " + " from sys.sysconstraints c, sys.sysforeignkeys fk" + " where fk.keyconstraintid = c.constraintid order by " + "constraintname"); expRS = new String [][]{{"PK1", "4"}}; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery( " select constraintname " + " from sys.sysconstraints c, sys.sysforeignkeys fk" + " where fk.constraintid = c.constraintid" + " order by 1"); expRS = new String [][] { {"FK"}, {"FK2"}, {"FK3"}, {"FK4"} }; JDBC.assertFullResultSet(rs, expRS, true); // we should not be able to drop the primary key assertStatementError("X0Y25", st, "alter table p1 drop constraint pk1"); assertStatementError("X0Y25", st, "drop table p1"); waitForPostCommit(); // now lets 
drop the foreign keys and try again st.executeUpdate("drop table f2"); st.executeUpdate("drop table f3"); st.executeUpdate("drop table f4"); waitForPostCommit(); rs = st.executeQuery( " select constraintname, referencecount " + " from sys.sysconstraints c, sys.sysforeignkeys fk" + " where fk.keyconstraintid = c.constraintid order by " + "constraintname"); expRS = new String [][]{{"PK1", "1"}}; JDBC.assertFullResultSet(rs, expRS, true); st.executeUpdate(" alter table f drop constraint fk"); waitForPostCommit(); // ok st.executeUpdate("alter table p1 drop constraint pk1"); waitForPostCommit(); // we shouldn't be able to add an fk on p1 now assertStatementError("X0Y41", st, "alter table f add constraint fk foreign key (x) " + "references p1"); // add the constraint and try again st.executeUpdate( "alter table p1 add constraint pk1 primary key (x)"); st.executeUpdate( " create table f2 (x int, constraint fk2 foreign key " + "(x) references p1(x))"); st.executeUpdate( " create table f3 (x int, constraint fk3 foreign key " + "(x) references p1(x))"); st.executeUpdate( " create table f4 (x int, constraint fk4 foreign key " + "(x) references p1(x))"); // drop constraint st.executeUpdate("alter table f4 drop constraint fk4"); st.executeUpdate("alter table f3 drop constraint fk3"); st.executeUpdate("alter table f2 drop constraint fk2"); st.executeUpdate("alter table p1 drop constraint pk1"); waitForPostCommit(); // all fks are gone, right? 
rs = st.executeQuery( "select constraintname " + " from sys.sysconstraints c, sys.sysforeignkeys fk" + " where fk.constraintid = c.constraintid order by " + "constraintname"); JDBC.assertDrainResults(rs, 0); // cleanup what we have done so far st.executeUpdate("drop table p1"); st.executeUpdate("drop table p2"); st.executeUpdate("drop table u1"); st.executeUpdate("drop table u2"); st.executeUpdate("drop table otherschema.p1"); st.executeUpdate("drop schema otherschema restrict"); waitForPostCommit(); // will return dependencies for SPS metadata queries now // created by default database is created. st.executeUpdate( "create table default_sysdepends_count(a int)"); st.executeUpdate( " insert into default_sysdepends_count select " + "count(*) from sys.sysdepends"); rs = st.executeQuery( " select * from default_sysdepends_count"); expRS = new String [][]{{Integer.toString(initialCardSysDepends)}}; JDBC.assertFullResultSet(rs, expRS, true); // now we are going to do some self referencing tests. st.executeUpdate( "create table selfref (p char(10) not null primary key, " + " f char(10) references selfref)"); st.executeUpdate("drop table selfref"); waitForPostCommit(); // ok st.executeUpdate( "create table selfref (p char(10) not null, " + " f char(10) references selfref, " + " constraint pk primary key (p))"); st.executeUpdate("drop table selfref"); waitForPostCommit(); // ok st.executeUpdate( "create table selfref (p char(10) not null, f char(10), " + " constraint f foreign key (f) references selfref(p), " + " constraint pk primary key (p))"); // should fail assertStatementError("X0Y25", st, "alter table selfref drop constraint pk"); waitForPostCommit(); // ok st.executeUpdate( "alter table selfref drop constraint f"); st.executeUpdate( " alter table selfref drop constraint pk"); st.executeUpdate("drop table selfref"); waitForPostCommit(); // what if a pk references another pk? 
should just drop // the direct references (nothing special, really) st.executeUpdate( "create table pr1(x int not null, " + " constraint pkr1 primary key (x))"); st.executeUpdate( " create table pr2(x int not null, " + " constraint pkr2 primary key(x), " + " constraint fpkr2 foreign key (x) references pr1)"); st.executeUpdate( " create table pr3(x int not null, " + " constraint pkr3 primary key(x), " + " constraint fpkr3 foreign key (x) references pr2)"); rs = st.executeQuery( " select constraintname, referencecount from " + "sys.sysconstraints order by constraintname"); expRS = new String [][] { {"FPKR2", "0"}, {"FPKR3", "0"}, {"PK3", "0"}, {"PKR1", "1"}, {"PKR2", "1"}, {"PKR3", "0"} }; JDBC.assertFullResultSet(rs, expRS, true); // now drop constraint pkr1 st.executeUpdate( "alter table pr2 drop constraint fpkr2"); st.executeUpdate( " alter table pr1 drop constraint pkr1"); waitForPostCommit(); // pkr1 and pfkr2 are gone rs = st.executeQuery( "select constraintname, referencecount from " + "sys.sysconstraints order by constraintname"); expRS = new String [][] { {"FPKR3", "0"}, {"PK3", "0"}, {"PKR2", "1"}, {"PKR3", "0"} }; JDBC.assertFullResultSet(rs, expRS, true); // cleanup st.executeUpdate( "drop table pr3"); st.executeUpdate("drop table pr2"); st.executeUpdate("drop table pr1"); waitForPostCommit(); // should return 0, confirm no unexpected dependencies // verify that all rows in sys.sysdepends got dropped // apart from sps dependencies st.executeUpdate( "create table default_sysdepends_count2(a int)"); st.executeUpdate( " insert into default_sysdepends_count2 select " + "count(*) from sys.sysdepends"); rs = st.executeQuery( " select default_sysdepends_count2.a - " + "default_sysdepends_count.a" + " from default_sysdepends_count2, default_sysdepends_count"); expRS = new String [][]{{"0"}}; JDBC.assertFullResultSet(rs, expRS, true); // dependencies and spses st.executeUpdate( "create table x (x int not null primary key, y int, " + "constraint xfk foreign key (y) 
references x)"); st.executeUpdate( " create table y (x int, constraint yfk foreign key " + "(x) references x)"); final PreparedStatement ss = prepareStatement( "select * from x"); final PreparedStatement si = prepareStatement( "insert into x values (1,1)"); final PreparedStatement su = prepareStatement( "update x set x = x+1, y=y+1"); st.executeUpdate( " alter table x drop constraint xfk"); waitForPostCommit(); setAutoCommit(false); // drop the referenced fk, should force su to be // recompiled since it no longer has to check the foreign // key table st.executeUpdate( "alter table y drop constraint yfk"); commit(); waitForPostCommit(); st.executeUpdate("drop table y"); commit(); waitForPostCommit(); // ok st.executeUpdate("drop table x"); ss.close(); si.close(); su.close(); st.executeUpdate("drop table f3"); st.executeUpdate("drop table f2"); st.executeUpdate("drop table f"); commit(); waitForPostCommit(); // verify that all rows in sys.sysdepends got dropped // apart from sps dependencies Since, with beetle 5352; we // create metadata SPS for network server at database // bootup time so the dependencies for SPS are there. 
st.executeUpdate( "create table default_sysdepends_count3(a int)"); st.executeUpdate( " insert into default_sysdepends_count3 select " + "count(*) from sys.sysdepends"); rs = st.executeQuery( " select default_sysdepends_count3.a - " + "default_sysdepends_count.a" + " from default_sysdepends_count3, default_sysdepends_count"); expRS = new String [][]{{"0"}}; JDBC.assertFullResultSet(rs, expRS, true); // ** insert fkdml.sql setAutoCommit(true); // DML and foreign keys assertStatementError("42Y55", st, "drop table s"); assertStatementError("42Y55", st, "drop table f3"); assertStatementError("42Y55", st, "drop table f2"); assertStatementError("42Y55", st, "drop table f"); assertStatementError("42Y55", st, "drop table p"); waitForPostCommit(); st.executeUpdate( " create table p (x int not null, y int not null, " + "constraint pk primary key (x,y))"); st.executeUpdate( " create table f (x int, y int, constraint fk " + "foreign key (x,y) references p)"); st.executeUpdate( " insert into p values (1,1)"); // ok st.executeUpdate("insert into f values (1,1)"); // fail assertStatementError("23503", st, "insert into f values (2,1)"); assertStatementError("23503", st, " insert into f values (1,2)"); // nulls are ok st.executeUpdate("insert into f values (1,null)"); st.executeUpdate("insert into f values (null,null)"); st.executeUpdate("insert into f values (1,null)"); // update on pk, fail assertStatementError("23503", st, "update p set x = 2"); assertStatementError("23503", st, "update p set y = 2"); assertStatementError("23503", st, "update p set x = 1, y = 2"); assertStatementError("23503", st, "update p set x = 2, y = 1"); assertStatementError("23503", st, "update p set x = 2, y = 2"); // ok assertUpdateCount(st, 1, "update p set x = 1, y = 1"); // delete pk, fail assertStatementError("23503", st, "delete from p"); // delete fk, ok assertUpdateCount(st, 4, "delete from f"); st.executeUpdate("insert into f values (1,1)"); // update fk, fail assertStatementError("23503", st, 
"update f set x = 2"); assertStatementError("23503", st, "update f set y = 2"); assertStatementError("23503", st, "update f set x = 1, y = 2"); assertStatementError("23503", st, "update f set x = 2, y = 1"); // update fk, ok assertUpdateCount(st, 1, "update f set x = 1, y = 1"); // nulls ok assertUpdateCount(st, 1, "update f set x = null, y = 1"); assertUpdateCount(st, 1, "update f set x = 1, y = null"); assertUpdateCount(st, 1, "update f set x = null, y = null"); assertUpdateCount(st, 1, "delete from f"); st.executeUpdate("insert into f values (1,1)"); st.executeUpdate("insert into p values (2,2)"); // ok assertUpdateCount(st, 1, "update f set x = x+1, y = y+1"); rs = st.executeQuery("select * from f"); expRS = new String [][]{{"2", "2"}}; JDBC.assertFullResultSet(rs, expRS, true); rs = st.executeQuery("select * from p"); expRS = new String [][] { {"1", "1"}, {"2", "2"} }; JDBC.assertFullResultSet(rs, expRS, true); // ok assertUpdateCount(st, 2, "update p set x = x+1, y = y+1"); // fail assertStatementError("23503", st, "update p set x = x+1, y = y+1"); // BOUNDARY CONDITIONS assertUpdateCount(st, 1, "delete from f"); assertUpdateCount(st, 2, "delete from p"); st.executeUpdate("insert into f select * from f"); assertUpdateCount(st, 0, "delete from p where x = 9999"); assertUpdateCount(st, 0, "update p set x = x+1, y=y+1 where x = 999"); st.executeUpdate("insert into p values (1,1)"); st.executeUpdate("insert into f values (1,1)"); assertUpdateCount(st, 0, "update p set x = x+1, y=y+1 where x = 999"); assertUpdateCount(st, 0, "delete from p where x = 9999"); st.executeUpdate("insert into f select * from f"); // test a CURSOR assertUpdateCount(st, 2, "delete from f"); assertUpdateCount(st, 1, "delete from p"); st.executeUpdate("insert into p values (1,1)"); st.executeUpdate("insert into f values (1,1)"); setAutoCommit(false); final Statement uS = createStatement( ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet r = uS.executeQuery("select * from p 
for update of x"); r.next(); assertEquals(r.getString(1), "1"); assertEquals(r.getString(2), "1"); try { // UPDATE on table 'P' caused a violation of foreign // key constraint 'FK' for key (1,1). r.updateInt("X", 666); } catch (SQLException e) { assertSQLState("23503", e); } r.close(); r = uS.executeQuery("select * from f for update of x"); r.next(); assertEquals(r.getString(1), "1"); assertEquals(r.getString(2), "1"); try { // UPDATE on table 'F' caused a violation of foreign // key constraint 'FK' for key (666,1). r.updateInt("X", 666); } catch (SQLException e) { assertSQLState("23503", e); } r.close(); commit(); setAutoCommit(true); assertUpdateCount(st, 1, "delete from f"); assertUpdateCount(st, 1, "delete from p"); st.executeUpdate("insert into p values (0,0), (1,1), (2,2), (3,3), (4,4)"); st.executeUpdate("insert into f values (1,1)"); // lets add some additional foreign keys to the mix st.executeUpdate( "create table f2 (x int, y int, constraint fk2 " + "foreign key (x,y) references p)"); st.executeUpdate("insert into f2 values (2,2)"); st.executeUpdate( " create table f3 (x int, y int, constraint fk3 " + "foreign key (x,y) references p)"); st.executeUpdate("insert into f3 values (3,3)"); // ok assertUpdateCount(st, 5, "update p set x = x+1, y = y+1"); // error, fk1 assertStatementError("23503", st, "update p set x = x+1"); assertStatementError("23503", st, "update p set y = y+1"); assertStatementError("23503", st, "update p set x = x+1, y = y+1"); // fail of fk3 assertStatementError("23503", st, "update p set y = 666 where y = 3"); // fail of fk2 assertStatementError("23503", st, "update p set x = 666 where x = 2"); // cleanup st.executeUpdate("drop table f"); st.executeUpdate("drop table f2"); st.executeUpdate("drop table f3"); st.executeUpdate("drop table p"); waitForPostCommit(); // SELF REFERENCING st.executeUpdate( "create table s (x int not null primary key, y int " + "references s, z int references s)"); // ok st.executeUpdate("insert into s values 
(1,null,null)"); // ok assertUpdateCount(st, 1, "update s set y = 1"); // fail assertStatementError("23503", st, "update s set z = 2"); // ok assertUpdateCount(st, 1, "update s set z = 1"); // ok st.executeUpdate("insert into s values (2, 1, 1)"); // ok assertUpdateCount(st, 1, "update s set x = 666 where x = 2"); // ok assertUpdateCount(st, 2, "update s set x = x+1, y = y+1, z = z+1"); assertUpdateCount(st, 2, "delete from s"); // ok st.executeUpdate("insert into s values (1,null,null)"); st.executeUpdate("insert into s values (2,null,null)"); assertUpdateCount(st, 1, "update s set y = 2 where x = 1"); assertUpdateCount(st, 1, "update s set z = 1 where x = 2"); rs = st.executeQuery("select * from s"); expRS = new String [][] { {"1", "2", null}, {"2", null, "1"} }; JDBC.assertFullResultSet(rs, expRS, true); // fail assertStatementError("23503", st, "update s set x = 0 where x = 1"); // Now we are going to do a short but sweet check to make // sure we are actually hitting the correct columns st.executeUpdate( "create table p (c1 char(1), y int not null, c2 " + "char(1), x int not null, constraint pk primary key (x,y))"); st.executeUpdate( " create table f (x int, s smallint, y int, " + "constraint fk foreign key (x,y) references p)"); st.executeUpdate( " insert into p values ('1',1,'1',1)"); // ok st.executeUpdate("insert into f values (1,1,1)"); st.executeUpdate("insert into p values ('0',0,'0',0)"); // ok assertUpdateCount(st, 2, "update p set x = x+1, y=y+1"); // fail assertStatementError("23503", st, "delete from p where y = 1"); assertStatementError("23503", st, "insert into f values (1,1,4)"); assertUpdateCount(st, 1, "delete from f"); assertUpdateCount(st, 2, "delete from p"); // Lets make sure we don't interact poorly with 'normal' // deferred dml st.executeUpdate("insert into p values ('1',1,'1',1)"); st.executeUpdate("insert into f values (1,1,1)"); st.executeUpdate("insert into p values ('0',0,'0',0)"); // ok assertUpdateCount(st, 2, "update p set x = 
x+1, y=y+1 where x < (select " + "max(x)+10000 from p)"); // fail assertStatementError("23503", st, "delete from p where y = 1 and y in (select y from p)"); // inserts st.executeUpdate( "create table f2 (x int, t smallint, y int)"); st.executeUpdate("insert into f2 values (1,1,4)"); // fail assertStatementError("23503", st,"insert into f select * from f2"); // ok st.executeUpdate("insert into f2 values (1,1,1)"); st.executeUpdate("insert into f select * from f2 where y = 1"); st.executeUpdate("drop table f2"); st.executeUpdate("drop table f"); st.executeUpdate("drop table p"); waitForPostCommit(); // PREPARED STATEMENTS assertStatementError("42Y55", st, "drop table f"); assertStatementError("42Y55", st, "drop table p"); //the reason for this wait call is to wait unitil system // tables row deletesare completed other wise we will get // different order fk checksthat will lead different error // messages depending on when post commit thread runs waitForPostCommit(); pSt = prepareStatement( "create table p (w int not null primary key, x int " + "references p, y int not null, z int not null, " + "constraint uyz unique (y,z))"); assertUpdateCount(pSt, 0); pSt = prepareStatement( "create table f (w int references p, x int, y int, z " + "int, constraint fk foreign key (y,z) references p (y,z))"); assertUpdateCount(pSt, 0); pSt = prepareStatement( "alter table f drop constraint fk"); assertUpdateCount(pSt, 0); //the reason for this wait call is to wait unitil system // tables row deletesare completed other wise we will get // different order fk checks waitForPostCommit(); pSt = prepareStatement( "alter table f add constraint fk foreign key (y,z) " + "references p (y,z)"); assertUpdateCount(pSt, 0); PreparedStatement sf = prepareStatement( "insert into f values (1,1,1,1)"); PreparedStatement sp = prepareStatement( "insert into p values (1,1,1,1)"); // fail assertStatementError("23503", sf); // ok assertUpdateCount(sp, 1); assertUpdateCount(sf, 1); st.executeUpdate(" insert 
into p values (2,2,2,2)"); pSt = prepareStatement( "update f set w=w+1, x = x+1, y=y+1, z=z+1"); // ok assertUpdateCount(pSt, 1); pSt = prepareStatement("update p set w=w+1, x = x+1, y=y+1, z=z+1"); // ok assertUpdateCount(pSt, 2); pSt = prepareStatement("delete from p where x =1"); // ok assertUpdateCount(pSt, 0); st.executeUpdate("drop table f"); st.executeUpdate("drop table p"); waitForPostCommit(); st.executeUpdate("drop procedure WAIT_FOR_POST_COMMIT"); rollback(); st.close(); } /** * Get a count of number of rows in SYS.SYSDEPENDS */ private int numberOfRowsInSysdepends(Statement st) throws SQLException { final ResultSet rs = st.executeQuery("SELECT COUNT(*) FROM SYS.SYSDEPENDS"); rs.next(); final int result = rs.getInt(1); rs.close(); return result; } private void waitForPostCommit() throws SQLException { final CallableStatement s = prepareCall("CALL WAIT_FOR_POST_COMMIT()"); assertUpdateCount(s, 0); s.close(); } }
apache/lucene
37,870
lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.backward_index; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.io.IOException; import java.util.List; import java.util.Random; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.BinaryPoint; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleDocValuesField; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.FloatDocValuesField; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.BinaryDocValues; 
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LogByteSizeMergePolicy; import org.apache.lucene.index.MultiBits; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.MultiTerms; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StandardDirectoryReader; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermVectors; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.Bits; import 
org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; public class TestBasicBackwardsCompatibility extends BackwardsCompatibilityTestBase { static final String INDEX_NAME = "index"; static final String SUFFIX_CFS = "-cfs"; static final String SUFFIX_NO_CFS = "-nocfs"; private static final int DOCS_COUNT = 35; private static final int DELETED_ID = 7; private static final int KNN_VECTOR_MIN_SUPPORTED_VERSION = Version.fromBits(9, 0, 0).major; private static final String KNN_VECTOR_FIELD = "knn_field"; private static final FieldType KNN_VECTOR_FIELD_TYPE = KnnFloatVectorField.createFieldType(3, VectorSimilarityFunction.COSINE); private static final float[] KNN_VECTOR = {0.2f, -0.1f, 0.1f}; /** * A parameter constructor for {@link com.carrotsearch.randomizedtesting.RandomizedRunner}. See * {@link #testVersionsFactory()} for details on the values provided to the framework. */ public TestBasicBackwardsCompatibility(Version version, String pattern) { super(version, pattern); } /** Provides all current version to the test-framework for each of the index suffixes. */ @ParametersFactory(argumentFormatting = "Lucene-Version:%1$s; Pattern: %2$s") public static Iterable<Object[]> testVersionsFactory() throws IllegalAccessException { return allVersion(INDEX_NAME, SUFFIX_CFS, SUFFIX_NO_CFS); } @Override protected void createIndex(Directory directory) throws IOException { if (indexPattern.equals(createPattern(INDEX_NAME, SUFFIX_CFS))) { createIndex(directory, true, false); } else { createIndex(directory, false, false); } } static void createIndex(Directory dir, boolean doCFS, boolean fullyMerged) throws IOException { LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy(); mp.setNoCFSRatio(doCFS ? 
1.0 : 0.0); mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); // TODO: remove randomness IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(10) .setCodec(TestUtil.getDefaultCodec()) .setMergePolicy(NoMergePolicy.INSTANCE); IndexWriter writer = new IndexWriter(dir, conf); for (int i = 0; i < DOCS_COUNT; i++) { addDoc(writer, i); } assertEquals("wrong doc count", DOCS_COUNT, writer.getDocStats().maxDoc); if (fullyMerged) { writer.forceMerge(1); } writer.close(); if (!fullyMerged) { // open fresh writer so we get no prx file in the added segment mp = new LogByteSizeMergePolicy(); mp.setNoCFSRatio(doCFS ? 1.0 : 0.0); // TODO: remove randomness conf = new IndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(10) .setCodec(TestUtil.getDefaultCodec()) .setMergePolicy(NoMergePolicy.INSTANCE); writer = new IndexWriter(dir, conf); addNoProxDoc(writer); writer.close(); conf = new IndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(10) .setCodec(TestUtil.getDefaultCodec()) .setMergePolicy(NoMergePolicy.INSTANCE); writer = new IndexWriter(dir, conf); Term searchTerm = new Term("id", String.valueOf(DELETED_ID)); writer.deleteDocuments(searchTerm); writer.close(); } } static void addDoc(IndexWriter writer, int id) throws IOException { Document doc = new Document(); doc.add(new TextField("content", "aaa", Field.Store.NO)); doc.add(new StringField("id", Integer.toString(id), Field.Store.YES)); FieldType customType2 = new FieldType(TextField.TYPE_STORED); customType2.setStoreTermVectors(true); customType2.setStoreTermVectorPositions(true); customType2.setStoreTermVectorOffsets(true); doc.add( new Field( "autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2)); doc.add( new Field( "utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2)); doc.add(new Field("content2", "here is more content with aaa aaa aaa", customType2)); doc.add(new 
Field("fie\u2C77ld", "field with non-ascii name", customType2)); // add docvalues fields doc.add(new NumericDocValuesField("dvByte", (byte) id)); byte[] bytes = new byte[] {(byte) (id >>> 24), (byte) (id >>> 16), (byte) (id >>> 8), (byte) id}; BytesRef ref = new BytesRef(bytes); doc.add(new BinaryDocValuesField("dvBytesDerefFixed", ref)); doc.add(new BinaryDocValuesField("dvBytesDerefVar", ref)); doc.add(new SortedDocValuesField("dvBytesSortedFixed", ref)); doc.add(new SortedDocValuesField("dvBytesSortedVar", ref)); doc.add(new BinaryDocValuesField("dvBytesStraightFixed", ref)); doc.add(new BinaryDocValuesField("dvBytesStraightVar", ref)); doc.add(new DoubleDocValuesField("dvDouble", id)); doc.add(new FloatDocValuesField("dvFloat", (float) id)); doc.add(new NumericDocValuesField("dvInt", id)); doc.add(new NumericDocValuesField("dvLong", id)); doc.add(new NumericDocValuesField("dvPacked", id)); doc.add(new NumericDocValuesField("dvShort", (short) id)); doc.add(new SortedSetDocValuesField("dvSortedSet", ref)); doc.add(new SortedNumericDocValuesField("dvSortedNumeric", id)); doc.add(new IntPoint("intPoint1d", id)); doc.add(new IntPoint("intPoint2d", id, 2 * id)); doc.add(new FloatPoint("floatPoint1d", (float) id)); doc.add(new FloatPoint("floatPoint2d", (float) id, (float) 2 * id)); doc.add(new LongPoint("longPoint1d", id)); doc.add(new LongPoint("longPoint2d", id, 2 * id)); doc.add(new DoublePoint("doublePoint1d", id)); doc.add(new DoublePoint("doublePoint2d", id, (double) 2 * id)); doc.add(new BinaryPoint("binaryPoint1d", bytes)); doc.add(new BinaryPoint("binaryPoint2d", bytes, bytes)); // a field with both offsets and term vectors for a cross-check FieldType customType3 = new FieldType(TextField.TYPE_STORED); customType3.setStoreTermVectors(true); customType3.setStoreTermVectorPositions(true); customType3.setStoreTermVectorOffsets(true); customType3.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); doc.add(new Field("content5", "here is more 
content with aaa aaa aaa", customType3)); // a field that omits only positions FieldType customType4 = new FieldType(TextField.TYPE_STORED); customType4.setStoreTermVectors(true); customType4.setStoreTermVectorPositions(false); customType4.setStoreTermVectorOffsets(true); customType4.setIndexOptions(IndexOptions.DOCS_AND_FREQS); doc.add(new Field("content6", "here is more content with aaa aaa aaa", customType4)); float[] vector = {KNN_VECTOR[0], KNN_VECTOR[1], KNN_VECTOR[2] + 0.1f * id}; doc.add(new KnnFloatVectorField(KNN_VECTOR_FIELD, vector, KNN_VECTOR_FIELD_TYPE)); // TODO: // index different norms types via similarity (we use a random one currently?!) // remove any analyzer randomness, explicitly add payloads for certain fields. writer.addDocument(doc); } static void addNoProxDoc(IndexWriter writer) throws IOException { Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_STORED); customType.setIndexOptions(IndexOptions.DOCS); Field f = new Field("content3", "aaa", customType); doc.add(f); FieldType customType2 = new FieldType(); customType2.setStored(true); customType2.setIndexOptions(IndexOptions.DOCS); f = new Field("content4", "aaa", customType2); doc.add(f); writer.addDocument(doc); } public static void searchIndex( Directory dir, String oldName, int minIndexMajorVersion, Version nameVersion) throws IOException { // QueryParser parser = new QueryParser("contents", new MockAnalyzer(random)); // Query query = parser.parse("handle:1"); IndexCommit indexCommit = DirectoryReader.listCommits(dir).get(0); IndexReader reader = DirectoryReader.open(indexCommit, minIndexMajorVersion, null); IndexSearcher searcher = newSearcher(reader); TestUtil.checkIndex(dir); final Bits liveDocs = MultiBits.getLiveDocs(reader); assertNotNull(liveDocs); StoredFields storedFields = reader.storedFields(); TermVectors termVectors = reader.termVectors(); for (int i = 0; i < DOCS_COUNT; i++) { if (liveDocs.get(i)) { Document d = storedFields.document(i); 
List<IndexableField> fields = d.getFields(); boolean isProxDoc = d.getField("content3") == null; if (isProxDoc) { assertEquals(7, fields.size()); IndexableField f = d.getField("id"); assertEquals("" + i, f.stringValue()); f = d.getField("utf8"); assertEquals( "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue()); f = d.getField("autf8"); assertEquals( "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", f.stringValue()); f = d.getField("content2"); assertEquals("here is more content with aaa aaa aaa", f.stringValue()); f = d.getField("fie\u2C77ld"); assertEquals("field with non-ascii name", f.stringValue()); } Fields tfvFields = termVectors.get(i); assertNotNull("i=" + i, tfvFields); Terms tfv = tfvFields.terms("utf8"); assertNotNull("docID=" + i + " index=" + oldName, tfv); } else { assertEquals(DELETED_ID, i); } } // check docvalues fields NumericDocValues dvByte = MultiDocValues.getNumericValues(reader, "dvByte"); BinaryDocValues dvBytesDerefFixed = MultiDocValues.getBinaryValues(reader, "dvBytesDerefFixed"); BinaryDocValues dvBytesDerefVar = MultiDocValues.getBinaryValues(reader, "dvBytesDerefVar"); SortedDocValues dvBytesSortedFixed = MultiDocValues.getSortedValues(reader, "dvBytesSortedFixed"); SortedDocValues dvBytesSortedVar = MultiDocValues.getSortedValues(reader, "dvBytesSortedVar"); BinaryDocValues dvBytesStraightFixed = MultiDocValues.getBinaryValues(reader, "dvBytesStraightFixed"); BinaryDocValues dvBytesStraightVar = MultiDocValues.getBinaryValues(reader, "dvBytesStraightVar"); NumericDocValues dvDouble = MultiDocValues.getNumericValues(reader, "dvDouble"); NumericDocValues dvFloat = MultiDocValues.getNumericValues(reader, "dvFloat"); NumericDocValues dvInt = MultiDocValues.getNumericValues(reader, "dvInt"); NumericDocValues dvLong = MultiDocValues.getNumericValues(reader, "dvLong"); NumericDocValues dvPacked = MultiDocValues.getNumericValues(reader, "dvPacked"); NumericDocValues dvShort = 
MultiDocValues.getNumericValues(reader, "dvShort"); SortedSetDocValues dvSortedSet = MultiDocValues.getSortedSetValues(reader, "dvSortedSet"); SortedNumericDocValues dvSortedNumeric = MultiDocValues.getSortedNumericValues(reader, "dvSortedNumeric"); for (int i = 0; i < DOCS_COUNT; i++) { int id = Integer.parseInt(storedFields.document(i).get("id")); assertEquals(i, dvByte.nextDoc()); assertEquals(id, dvByte.longValue()); byte[] bytes = new byte[] {(byte) (id >>> 24), (byte) (id >>> 16), (byte) (id >>> 8), (byte) id}; BytesRef expectedRef = new BytesRef(bytes); assertEquals(i, dvBytesDerefFixed.nextDoc()); BytesRef term = dvBytesDerefFixed.binaryValue(); assertEquals(expectedRef, term); assertEquals(i, dvBytesDerefVar.nextDoc()); term = dvBytesDerefVar.binaryValue(); assertEquals(expectedRef, term); assertEquals(i, dvBytesSortedFixed.nextDoc()); term = dvBytesSortedFixed.lookupOrd(dvBytesSortedFixed.ordValue()); assertEquals(expectedRef, term); assertEquals(i, dvBytesSortedVar.nextDoc()); term = dvBytesSortedVar.lookupOrd(dvBytesSortedVar.ordValue()); assertEquals(expectedRef, term); assertEquals(i, dvBytesStraightFixed.nextDoc()); term = dvBytesStraightFixed.binaryValue(); assertEquals(expectedRef, term); assertEquals(i, dvBytesStraightVar.nextDoc()); term = dvBytesStraightVar.binaryValue(); assertEquals(expectedRef, term); assertEquals(i, dvDouble.nextDoc()); assertEquals(id, Double.longBitsToDouble(dvDouble.longValue()), 0D); assertEquals(i, dvFloat.nextDoc()); assertEquals((float) id, Float.intBitsToFloat((int) dvFloat.longValue()), 0F); assertEquals(i, dvInt.nextDoc()); assertEquals(id, dvInt.longValue()); assertEquals(i, dvLong.nextDoc()); assertEquals(id, dvLong.longValue()); assertEquals(i, dvPacked.nextDoc()); assertEquals(id, dvPacked.longValue()); assertEquals(i, dvShort.nextDoc()); assertEquals(id, dvShort.longValue()); assertEquals(i, dvSortedSet.nextDoc()); assertEquals(1, dvSortedSet.docValueCount()); long ord = dvSortedSet.nextOrd(); term = 
dvSortedSet.lookupOrd(ord); assertEquals(expectedRef, term); assertEquals(i, dvSortedNumeric.nextDoc()); assertEquals(1, dvSortedNumeric.docValueCount()); assertEquals(id, dvSortedNumeric.nextValue()); } ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; // First document should be #0 Document d = storedFields.document(hits[0].doc); assertEquals("didn't get the right document first", "0", d.get("id")); doTestHits(hits, 34, searcher.getIndexReader()); hits = searcher.search(new TermQuery(new Term("content5", "aaa")), 1000).scoreDocs; doTestHits(hits, 34, searcher.getIndexReader()); hits = searcher.search(new TermQuery(new Term("content6", "aaa")), 1000).scoreDocs; doTestHits(hits, 34, searcher.getIndexReader()); hits = searcher.search(new TermQuery(new Term("utf8", "\u0000")), 1000).scoreDocs; assertEquals(34, hits.length); hits = searcher.search(new TermQuery(new Term("utf8", "lu\uD834\uDD1Ece\uD834\uDD60ne")), 1000) .scoreDocs; assertEquals(34, hits.length); hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), 1000).scoreDocs; assertEquals(34, hits.length); doTestHits( searcher.search(IntPoint.newRangeQuery("intPoint1d", 0, 34), 1000).scoreDocs, 34, searcher.getIndexReader()); doTestHits( searcher.search( IntPoint.newRangeQuery("intPoint2d", new int[] {0, 0}, new int[] {34, 68}), 1000) .scoreDocs, 34, searcher.getIndexReader()); doTestHits( searcher.search(FloatPoint.newRangeQuery("floatPoint1d", 0f, 34f), 1000).scoreDocs, 34, searcher.getIndexReader()); doTestHits( searcher.search( FloatPoint.newRangeQuery( "floatPoint2d", new float[] {0f, 0f}, new float[] {34f, 68f}), 1000) .scoreDocs, 34, searcher.getIndexReader()); doTestHits( searcher.search(LongPoint.newRangeQuery("longPoint1d", 0, 34), 1000).scoreDocs, 34, searcher.getIndexReader()); doTestHits( searcher.search( LongPoint.newRangeQuery("longPoint2d", new long[] {0, 0}, new long[] {34, 68}), 1000) .scoreDocs, 34, searcher.getIndexReader()); 
doTestHits( searcher.search(DoublePoint.newRangeQuery("doublePoint1d", 0.0, 34.0), 1000).scoreDocs, 34, searcher.getIndexReader()); doTestHits( searcher.search( DoublePoint.newRangeQuery( "doublePoint2d", new double[] {0.0, 0.0}, new double[] {34.0, 68.0}), 1000) .scoreDocs, 34, searcher.getIndexReader()); byte[] bytes1 = new byte[4]; byte[] bytes2 = new byte[] {0, 0, 0, (byte) 34}; doTestHits( searcher.search(BinaryPoint.newRangeQuery("binaryPoint1d", bytes1, bytes2), 1000).scoreDocs, 34, searcher.getIndexReader()); byte[] bytes3 = new byte[] {0, 0, 0, (byte) 68}; doTestHits( searcher.search( BinaryPoint.newRangeQuery( "binaryPoint2d", new byte[][] {bytes1, bytes1}, new byte[][] {bytes2, bytes3}), 1000) .scoreDocs, 34, searcher.getIndexReader()); // test vector values and KNN search if (nameVersion.major >= KNN_VECTOR_MIN_SUPPORTED_VERSION) { // test vector values int cnt = 0; for (LeafReaderContext ctx : reader.leaves()) { FloatVectorValues values = ctx.reader().getFloatVectorValues(KNN_VECTOR_FIELD); if (values != null) { assertEquals(KNN_VECTOR_FIELD_TYPE.vectorDimension(), values.dimension()); KnnVectorValues.DocIndexIterator it = values.iterator(); for (int doc = it.nextDoc(); doc != NO_MORE_DOCS; doc = it.nextDoc()) { float[] expectedVector = {KNN_VECTOR[0], KNN_VECTOR[1], KNN_VECTOR[2] + 0.1f * cnt}; assertArrayEquals( "vectors do not match for doc=" + cnt, expectedVector, values.vectorValue(it.index()), 0); cnt++; } } } assertEquals(DOCS_COUNT, cnt); // test KNN search ScoreDoc[] scoreDocs = assertKNNSearch(searcher, KNN_VECTOR, 10, 10, "0"); for (int i = 0; i < scoreDocs.length; i++) { int id = Integer.parseInt(storedFields.document(scoreDocs[i].doc).get("id")); int expectedId = i < DELETED_ID ? 
i : i + 1; assertEquals(expectedId, id); } } reader.close(); } private static void doTestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader) throws IOException { final int hitCount = hits.length; assertEquals("wrong number of hits", expectedCount, hitCount); StoredFields storedFields = reader.storedFields(); TermVectors termVectors = reader.termVectors(); for (ScoreDoc hit : hits) { storedFields.document(hit.doc); termVectors.get(hit.doc); } } static ScoreDoc[] assertKNNSearch( IndexSearcher searcher, float[] queryVector, int k, int expectedHitsCount, String expectedFirstDocId) throws IOException { ScoreDoc[] hits = searcher.search(new KnnFloatVectorQuery(KNN_VECTOR_FIELD, queryVector, k), k).scoreDocs; assertEquals("wrong number of hits", expectedHitsCount, hits.length); Document d = searcher.storedFields().document(hits[0].doc); assertEquals("wrong first document", expectedFirstDocId, d.get("id")); return hits; } public void changeIndexWithAdds(Random random, Directory dir, Version nameVersion) throws IOException { SegmentInfos infos = SegmentInfos.readLatestCommit(dir); assertEquals(nameVersion, infos.getCommitLuceneVersion()); assertEquals(nameVersion, infos.getMinSegmentLuceneVersion()); // open writer IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(new MockAnalyzer(random)) .setCodec(TestUtil.getDefaultCodec()) .setOpenMode(IndexWriterConfig.OpenMode.APPEND) .setMergePolicy(newLogMergePolicy())); // add 10 docs for (int i = 0; i < 10; i++) { addDoc(writer, DOCS_COUNT + i); } // make sure writer sees right total -- writer seems not to know about deletes in .del? 
final int expected = 45; assertEquals("wrong doc count", expected, writer.getDocStats().numDocs); writer.close(); // make sure searching sees right # hits for term search IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; Document d = searcher.getIndexReader().storedFields().document(hits[0].doc); assertEquals("wrong first document", "0", d.get("id")); doTestHits(hits, 44, searcher.getIndexReader()); if (nameVersion.major >= KNN_VECTOR_MIN_SUPPORTED_VERSION) { // make sure KNN search sees all hits (graph may not be used if k is big) assertKNNSearch(searcher, KNN_VECTOR, 1000, 44, "0"); // make sure KNN search using HNSW graph sees newly added docs assertKNNSearch( searcher, new float[] {KNN_VECTOR[0], KNN_VECTOR[1], KNN_VECTOR[2] + 0.1f * 44}, 10, 10, "44"); } reader.close(); // fully merge writer = new IndexWriter( dir, newIndexWriterConfig(new MockAnalyzer(random)) .setCodec(TestUtil.getDefaultCodec()) .setOpenMode(IndexWriterConfig.OpenMode.APPEND) .setMergePolicy(newLogMergePolicy())); writer.forceMerge(1); writer.close(); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); // make sure searching sees right # hits fot term search hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; assertEquals("wrong number of hits", 44, hits.length); d = searcher.storedFields().document(hits[0].doc); doTestHits(hits, 44, searcher.getIndexReader()); assertEquals("wrong first document", "0", d.get("id")); if (nameVersion.major >= KNN_VECTOR_MIN_SUPPORTED_VERSION) { // make sure KNN search sees all hits assertKNNSearch(searcher, KNN_VECTOR, 1000, 44, "0"); // make sure KNN search using HNSW graph sees newly added docs assertKNNSearch( searcher, new float[] {KNN_VECTOR[0], KNN_VECTOR[1], KNN_VECTOR[2] + 0.1f * 44}, 10, 10, "44"); } reader.close(); } public void changeIndexNoAdds(Random random, 
Directory dir, Version nameVersion) throws IOException { // make sure searching sees right # hits for term search DirectoryReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; assertEquals("wrong number of hits", 34, hits.length); Document d = searcher.storedFields().document(hits[0].doc); assertEquals("wrong first document", "0", d.get("id")); if (nameVersion.major >= KNN_VECTOR_MIN_SUPPORTED_VERSION) { // make sure KNN search sees all hits assertKNNSearch(searcher, KNN_VECTOR, 1000, 34, "0"); // make sure KNN search using HNSW graph retrieves correct results assertKNNSearch(searcher, KNN_VECTOR, 10, 10, "0"); } reader.close(); // fully merge IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(new MockAnalyzer(random)) .setCodec(TestUtil.getDefaultCodec()) .setOpenMode(IndexWriterConfig.OpenMode.APPEND)); writer.forceMerge(1); writer.close(); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); // make sure searching sees right # hits fot term search hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; assertEquals("wrong number of hits", 34, hits.length); doTestHits(hits, 34, searcher.getIndexReader()); // make sure searching sees right # hits for KNN search if (nameVersion.major >= KNN_VECTOR_MIN_SUPPORTED_VERSION) { // make sure KNN search sees all hits assertKNNSearch(searcher, KNN_VECTOR, 1000, 34, "0"); // make sure KNN search using HNSW graph retrieves correct results assertKNNSearch(searcher, KNN_VECTOR, 10, 10, "0"); } reader.close(); } // flex: test basics of TermsEnum api on non-flex index public void testNextIntoWrongField() throws Exception { IndexReader r = DirectoryReader.open(directory); TermsEnum terms = MultiTerms.getTerms(r, "content").iterator(); BytesRef t = terms.next(); assertNotNull(t); // content field only has term aaa: assertEquals("aaa", 
t.utf8ToString()); assertNull(terms.next()); BytesRef aaaTerm = new BytesRef("aaa"); // should be found exactly assertEquals(TermsEnum.SeekStatus.FOUND, terms.seekCeil(aaaTerm)); assertEquals(DOCS_COUNT, countDocs(TestUtil.docs(random(), terms, null, PostingsEnum.NONE))); assertNull(terms.next()); // should hit end of field assertEquals(TermsEnum.SeekStatus.END, terms.seekCeil(new BytesRef("bbb"))); assertNull(terms.next()); // should seek to aaa assertEquals(TermsEnum.SeekStatus.NOT_FOUND, terms.seekCeil(new BytesRef("a"))); assertTrue(terms.term().bytesEquals(aaaTerm)); assertEquals(DOCS_COUNT, countDocs(TestUtil.docs(random(), terms, null, PostingsEnum.NONE))); assertNull(terms.next()); assertEquals(TermsEnum.SeekStatus.FOUND, terms.seekCeil(aaaTerm)); assertEquals(DOCS_COUNT, countDocs(TestUtil.docs(random(), terms, null, PostingsEnum.NONE))); assertNull(terms.next()); r.close(); } /** * Test that we didn't forget to bump the current Constants.LUCENE_MAIN_VERSION. This is important * so that we can determine which version of lucene wrote the segment. 
*/ public void testOldVersions() throws Exception { // first create a little index with the current code and get the version Directory currentDir = newDirectory(); RandomIndexWriter riw = new RandomIndexWriter(random(), currentDir); riw.addDocument(new Document()); riw.close(); DirectoryReader ir = DirectoryReader.open(currentDir); SegmentReader air = (SegmentReader) ir.leaves().get(0).reader(); Version currentVersion = air.getSegmentInfo().info.getVersion(); assertNotNull(currentVersion); // only 3.0 segments can have a null version ir.close(); currentDir.close(); // now check all the old indexes, their version should be < the current version DirectoryReader r = DirectoryReader.open(directory); for (LeafReaderContext context : r.leaves()) { air = (SegmentReader) context.reader(); Version oldVersion = air.getSegmentInfo().info.getVersion(); assertNotNull(oldVersion); // only 3.0 segments can have a null version assertTrue( "current Version.LATEST is <= an old index: did you forget to bump it?!", currentVersion.onOrAfter(oldVersion)); } r.close(); } public void testIndexCreatedVersion() throws IOException { SegmentInfos infos = SegmentInfos.readLatestCommit(directory); // those indexes are created by a single version so we can // compare the commit version with the created version assertEquals(infos.getCommitLuceneVersion().major, infos.getIndexCreatedVersionMajor()); } public void testSegmentCommitInfoId() throws IOException { Directory dir = this.directory; SegmentInfos infos = SegmentInfos.readLatestCommit(dir); for (SegmentCommitInfo info : infos) { if (info.info.getVersion().onOrAfter(Version.fromBits(8, 6, 0))) { assertNotNull(info.toString(), info.getId()); } else { assertNull(info.toString(), info.getId()); } } } private int countDocs(PostingsEnum docs) throws IOException { int count = 0; while ((docs.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { count++; } return count; } public void testIndexOldIndexNoAdds() throws Exception { try (Directory dir = 
newDirectory(directory)) { changeIndexNoAdds(random(), dir, version); } } public void testIndexOldIndex() throws Exception { try (Directory dir = newDirectory(directory)) { changeIndexWithAdds(random(), dir, version); } } public void testSearchOldIndex() throws Exception { searchIndex(directory, indexPattern, Version.MIN_SUPPORTED_MAJOR, version); } public void testFullyMergeOldIndex() throws Exception { try (Directory dir = newDirectory(this.directory)) { final SegmentInfos oldSegInfos = SegmentInfos.readLatestCommit(dir); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))); w.forceMerge(1); w.close(); final SegmentInfos segInfos = SegmentInfos.readLatestCommit(dir); assertEquals( oldSegInfos.getIndexCreatedVersionMajor(), segInfos.getIndexCreatedVersionMajor()); assertEquals(Version.LATEST, segInfos.asList().get(0).info.getVersion()); assertEquals( oldSegInfos.asList().get(0).info.getMinVersion(), segInfos.asList().get(0).info.getMinVersion()); } } public void testAddOldIndexes() throws IOException { SegmentInfos infos = SegmentInfos.readLatestCommit(directory); Directory targetDir = newDirectory(); if (infos.getCommitLuceneVersion().major != Version.LATEST.major) { // both indexes are not compatible Directory targetDir2 = newDirectory(); IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random()))); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> w.addIndexes(directory)); assertTrue( e.getMessage(), e.getMessage() .startsWith( "Cannot use addIndexes(Directory) with indexes that have been created by a different Lucene version.")); w.close(); targetDir2.close(); // for the next test, we simulate writing to an index that was created on the same major // version new SegmentInfos(infos.getIndexCreatedVersionMajor()).commit(targetDir); } IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random()))); w.addIndexes(directory); w.close(); 
SegmentInfos si = SegmentInfos.readLatestCommit(targetDir); assertNull( "none of the segments should have been upgraded", si.asList().stream() .filter( // depending on the MergePolicy we might see these segments merged away sci -> sci.getId() != null && sci.info.getVersion().onOrAfter(Version.fromBits(8, 6, 0)) == false) .findAny() .orElse(null)); if (VERBOSE) { System.out.println("\nTEST: done adding indices; now close"); } targetDir.close(); } public void testAddOldIndexesReader() throws IOException { SegmentInfos infos = SegmentInfos.readLatestCommit(directory); DirectoryReader reader = DirectoryReader.open(directory); Directory targetDir = newDirectory(); if (infos.getCommitLuceneVersion().major != Version.LATEST.major) { Directory targetDir2 = newDirectory(); IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random()))); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TestUtil.addIndexesSlowly(w, reader)); assertEquals( e.getMessage(), "Cannot merge a segment that has been created with major version 10 into this index which has been created by major version 11"); w.close(); targetDir2.close(); // for the next test, we simulate writing to an index that was created on the same major // version new SegmentInfos(infos.getIndexCreatedVersionMajor()).commit(targetDir); } IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random()))); TestUtil.addIndexesSlowly(w, reader); w.close(); reader.close(); SegmentInfos si = SegmentInfos.readLatestCommit(targetDir); assertNull( "all SCIs should have an id now", si.asList().stream().filter(sci -> sci.getId() == null).findAny().orElse(null)); targetDir.close(); } public void testFailOpenOldIndex() throws IOException { assumeFalse("doesn't work on current index", version.major == Version.LATEST.major); IndexCommit commit = DirectoryReader.listCommits(directory).get(0); IndexFormatTooOldException ex = expectThrows( 
IndexFormatTooOldException.class, () -> StandardDirectoryReader.open(commit, Version.LATEST.major, null)); assertTrue( ex.getMessage() .contains( "This Lucene version only supports indexes created with major version " + Version.LATEST.major + " or later")); // now open with allowed min version StandardDirectoryReader.open(commit, Version.MIN_SUPPORTED_MAJOR, null).close(); } public void testOpenModeAndCreatedVersion() throws IOException { Directory dir = newDirectory(directory); int majorVersion = SegmentInfos.readLatestCommit(dir).getIndexCreatedVersionMajor(); if (majorVersion != Version.MIN_SUPPORTED_MAJOR && majorVersion != Version.LATEST.major) { fail( "expected one of: [" + Version.MIN_SUPPORTED_MAJOR + ", " + Version.LATEST.major + "] but got: " + majorVersion); } for (IndexWriterConfig.OpenMode openMode : IndexWriterConfig.OpenMode.values()) { Directory tmpDir = newDirectory(dir); IndexWriter w = new IndexWriter(tmpDir, newIndexWriterConfig().setOpenMode(openMode)); w.commit(); w.close(); switch (openMode) { case CREATE: assertEquals( Version.LATEST.major, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor()); break; case APPEND: case CREATE_OR_APPEND: default: assertEquals( majorVersion, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor()); } tmpDir.close(); } dir.close(); } }
apache/hadoop
37,901
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesForCSWithPartitions.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.webapp; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.FN_ACT_ALLOCATION_STATE; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.FN_ACT_ALLOCATIONS; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.FN_ACT_DIAGNOSTIC; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.FN_SCHEDULER_ACT_NAME; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.FN_SCHEDULER_ACT_ROOT; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.getFirstSubNodeFromJson; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.ActivitiesTestUtils.verifyNumberOfAllocations; import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.createRM; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.StringReader; 
import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.function.Predicate; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Application; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap; import org.apache.hadoop.http.JettyUtils; import org.apache.hadoop.util.Sets; import org.apache.hadoop.util.XMLUtils; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.MockAM; import org.apache.hadoop.yarn.server.resourcemanager.MockNM; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.MockRMAppSubmissionData; import org.apache.hadoop.yarn.server.resourcemanager.MockRMAppSubmitter; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivityDiagnosticConstant; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivityState; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.QueuePath; import org.apache.hadoop.yarn.util.resource.Resources; import 
org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.JerseyTestBase; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; import org.glassfish.jersey.internal.inject.AbstractBinder; import org.glassfish.jersey.jettison.JettisonFeature; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.test.TestProperties; public class TestRMWebServicesForCSWithPartitions extends JerseyTestBase { private static final String DEFAULT_PARTITION = ""; private static final String CAPACITIES = "capacities"; private static final String RESOURCE_USAGES_BY_PARTITION = "resourceUsagesByPartition"; private static final String QUEUE_CAPACITIES_BY_PARTITION = "queueCapacitiesByPartition"; private static final String QUEUE_C = "Qc"; private static final String LEAF_QUEUE_C1 = "Qc1"; private static final String LEAF_QUEUE_C2 = "Qc2"; private static final String QUEUE_B = "Qb"; private static final String QUEUE_A = "Qa"; private static final String LABEL_LY = "Ly"; private static final String LABEL_LX = "Lx"; private static final QueuePath ROOT_QUEUE_PATH = new QueuePath(CapacitySchedulerConfiguration.ROOT); private static final ImmutableSet<String> CLUSTER_LABELS = ImmutableSet.of(LABEL_LX, LABEL_LY, DEFAULT_PARTITION); private static final String DOT = "."; private static final double EPSILON = 1e-1f; private MockRM rm; private CapacitySchedulerConfiguration csConf; private YarnConfiguration conf; private boolean legacyQueueMode; public static 
Collection<Boolean> getParameters() { return Arrays.asList(true, false); } private MockNM nm1; @Override protected Application configure() { ResourceConfig config = new ResourceConfig(); config.register(RMWebServices.class); config.register(new JerseyBinder()); config.register(GenericExceptionHandler.class); config.register(new JettisonFeature()).register(JAXBContextResolver.class); forceSet(TestProperties.CONTAINER_PORT, JERSEY_RANDOM_PORT); return config; } private class JerseyBinder extends AbstractBinder { @Override protected void configure() { try { csConf = new CapacitySchedulerConfiguration(); setupQueueConfiguration(csConf); conf = new YarnConfiguration(csConf); rm = createRM(conf); Set<NodeLabel> labels = new HashSet<NodeLabel>(); labels.add(NodeLabel.newInstance(LABEL_LX)); labels.add(NodeLabel.newInstance(LABEL_LY)); try { RMNodeLabelsManager nodeLabelManager = rm.getRMContext().getNodeLabelManager(); nodeLabelManager.addToCluserNodeLabels(labels); } catch (Exception e) { fail(); } rm.start(); rm.getRMContext().getNodeLabelManager().addLabelsToNode(ImmutableMap .of(NodeId.newInstance("127.0.0.1", 0), Sets.newHashSet(LABEL_LX))); nm1 = new MockNM("127.0.0.1:1234", 2 * 1024, rm.getResourceTrackerService()); MockNM nm2 = new MockNM("127.0.0.2:1234", 2 * 1024, rm.getResourceTrackerService()); nm1.registerNode(); nm2.registerNode(); rm.getRMContext().getNodeLabelManager().addLabelsToNode(ImmutableMap .of(NodeId.newInstance("127.0.0.2", 0), Sets.newHashSet(LABEL_LY))); MockNM nm3 = new MockNM("127.0.0.2:1234", 128 * 1024, rm.getResourceTrackerService()); nm3.registerNode(); // Default partition MockNM nm4 = new MockNM("127.0.0.3:1234", 128 * 1024, rm.getResourceTrackerService()); nm4.registerNode(); HttpServletRequest request = mock(HttpServletRequest.class); when(request.getScheme()).thenReturn("http"); final HttpServletResponse response = mock(HttpServletResponse.class); bind(rm).to(ResourceManager.class).named("rm"); 
bind(conf).to(Configuration.class).named("conf"); bind(request).to(HttpServletRequest.class); bind(response).to(HttpServletResponse.class); } catch (Exception e) { throw new RuntimeException(e); } } } private void setupQueueConfiguration( CapacitySchedulerConfiguration config) { config.setLegacyQueueModeEnabled(legacyQueueMode); // Define top-level queues config.setQueues(ROOT_QUEUE_PATH, new String[] { QUEUE_A, QUEUE_B, QUEUE_C }); String interMediateQueueC = CapacitySchedulerConfiguration.ROOT + "." + QUEUE_C; QueuePath interMediateQueueCPath = new QueuePath(interMediateQueueC); config.setQueues(interMediateQueueCPath, new String[] { LEAF_QUEUE_C1, LEAF_QUEUE_C2 }); config.setCapacityByLabel(ROOT_QUEUE_PATH, LABEL_LX, 100); config.setCapacityByLabel(ROOT_QUEUE_PATH, LABEL_LY, 100); String leafQueueA = CapacitySchedulerConfiguration.ROOT + "." + QUEUE_A; QueuePath leafQueueAPath = new QueuePath(leafQueueA); config.setCapacity(leafQueueAPath, 30); config.setMaximumCapacity(leafQueueAPath, 50); String leafQueueB = CapacitySchedulerConfiguration.ROOT + "." + QUEUE_B; QueuePath leafQueueBPath = new QueuePath(leafQueueB); config.setCapacity(leafQueueBPath, 30); config.setMaximumCapacity(leafQueueBPath, 50); config.setCapacity(interMediateQueueCPath, 40); config.setMaximumCapacity(interMediateQueueCPath, 50); String leafQueueC1 = interMediateQueueC + "." + LEAF_QUEUE_C1; QueuePath leafQueueC1Path = new QueuePath(leafQueueC1); config.setCapacity(leafQueueC1Path, 50); config.setMaximumCapacity(leafQueueC1Path, 60); String leafQueueC2 = interMediateQueueC + "." 
+ LEAF_QUEUE_C2; QueuePath leafQueueC2Path = new QueuePath(leafQueueC2); config.setCapacity(leafQueueC2Path, 50); config.setMaximumCapacity(leafQueueC2Path, 70); // Define label specific configuration config.setAccessibleNodeLabels( leafQueueAPath, ImmutableSet.of(DEFAULT_PARTITION)); config.setAccessibleNodeLabels(leafQueueBPath, ImmutableSet.of(LABEL_LX)); config.setAccessibleNodeLabels(interMediateQueueCPath, ImmutableSet.of(LABEL_LX, LABEL_LY)); config.setAccessibleNodeLabels(leafQueueC1Path, ImmutableSet.of(LABEL_LX, LABEL_LY)); config.setAccessibleNodeLabels(leafQueueC2Path, ImmutableSet.of(LABEL_LX, LABEL_LY)); config.setDefaultNodeLabelExpression(leafQueueBPath, LABEL_LX); config.setDefaultNodeLabelExpression(leafQueueC1Path, LABEL_LX); config.setDefaultNodeLabelExpression(leafQueueC2Path, LABEL_LY); config.setCapacityByLabel(leafQueueBPath, LABEL_LX, 30); config.setCapacityByLabel(interMediateQueueCPath, LABEL_LX, 70); config.setCapacityByLabel(leafQueueC1Path, LABEL_LX, 40); config.setCapacityByLabel(leafQueueC2Path, LABEL_LX, 60); config.setCapacityByLabel(interMediateQueueCPath, LABEL_LY, 100); config.setCapacityByLabel(leafQueueC1Path, LABEL_LY, 50); config.setCapacityByLabel(leafQueueC2Path, LABEL_LY, 50); config.setMaximumCapacityByLabel(leafQueueC1Path, LABEL_LY, 75); config.setMaximumCapacityByLabel(leafQueueC2Path, LABEL_LY, 75); } @Override public void setUp() throws Exception { super.setUp(); } @AfterEach public void tearDown() { if (rm != null) { rm.stop(); } } public void initTestRMWebServicesForCSWithPartitions(boolean pLegacyQueueMode) throws Exception { this.legacyQueueMode = pLegacyQueueMode; setUp(); } @MethodSource("getParameters") @ParameterizedTest(name = "{index}: legacy-queue-mode={0}") public void testSchedulerPartitions(boolean pLegacyQueueMode) throws JSONException, Exception { initTestRMWebServicesForCSWithPartitions(pLegacyQueueMode); WebTarget r = targetWithJsonObject(); Response response = 
r.path("ws").path("v1").path("cluster").path("scheduler") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); verifySchedulerInfoJson(json); } @MethodSource("getParameters") @ParameterizedTest(name = "{index}: legacy-queue-mode={0}") public void testSchedulerPartitionsSlash(boolean pLegacyQueueMode) throws JSONException, Exception { initTestRMWebServicesForCSWithPartitions(pLegacyQueueMode); WebTarget r = targetWithJsonObject(); Response response = r.path("ws").path("v1").path("cluster").path("scheduler/") .request(MediaType.APPLICATION_JSON).get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); verifySchedulerInfoJson(json); } @MethodSource("getParameters") @ParameterizedTest(name = "{index}: legacy-queue-mode={0}") public void testSchedulerPartitionsDefault(boolean pLegacyQueueMode) throws JSONException, Exception { initTestRMWebServicesForCSWithPartitions(pLegacyQueueMode); WebTarget r = targetWithJsonObject(); Response response = r.path("ws").path("v1").path("cluster") .path("scheduler").request().get(Response.class); assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8, response.getMediaType().toString()); JSONObject json = response.readEntity(JSONObject.class); verifySchedulerInfoJson(json); } @MethodSource("getParameters") @ParameterizedTest(name = "{index}: legacy-queue-mode={0}") public void testSchedulerPartitionsXML(boolean pLegacyQueueMode) throws JSONException, Exception { initTestRMWebServicesForCSWithPartitions(pLegacyQueueMode); WebTarget r = target(); Response response = r.path("ws").path("v1").path("cluster").path("scheduler") .request(MediaType.APPLICATION_XML).get(Response.class); assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + 
JettyUtils.UTF_8, response.getMediaType().toString()); String xml = response.readEntity(String.class); DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory(); DocumentBuilder db = dbf.newDocumentBuilder(); InputSource is = new InputSource(); is.setCharacterStream(new StringReader(xml)); Document dom = db.parse(is); verifySchedulerInfoXML(dom); } @MethodSource("getParameters") @ParameterizedTest(name = "{index}: legacy-queue-mode={0}") public void testPartitionInSchedulerActivities(boolean pLegacyQueueMode) throws Exception { initTestRMWebServicesForCSWithPartitions(pLegacyQueueMode); RMApp app1 = MockRMAppSubmitter.submit(rm, MockRMAppSubmissionData.Builder.createWithMemory(1024, rm) .withAppName("app1") .withUser("user1") .withAcls(null) .withQueue(QUEUE_B) .withAmLabel(LABEL_LX) .build()); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm, nm1); am1.allocate(Arrays.asList( ResourceRequest.newBuilder().priority(Priority.UNDEFINED) .resourceName("*").nodeLabelExpression(LABEL_LX) .capability(Resources.createResource(2048)).numContainers(1) .build()), null); WebTarget sr = target().path(RMWSConsts.RM_WEB_SERVICE_PATH) .path(RMWSConsts.SCHEDULER_ACTIVITIES); ActivitiesTestUtils.requestWebResource(sr, null); nm1.nodeHeartbeat(true); Thread.sleep(1000); JSONObject schedulerActivitiesJson = ActivitiesTestUtils.requestWebResource(sr, null); /* * verify scheduler activities */ verifyNumberOfAllocations(schedulerActivitiesJson, 1); // verify queue Qb Predicate<JSONObject> findQueueBPred = (obj) -> obj.optString(FN_SCHEDULER_ACT_NAME) .equals(CapacitySchedulerConfiguration.ROOT + DOT + QUEUE_B); List<JSONObject> queueBObj = ActivitiesTestUtils.findInAllocations( getFirstSubNodeFromJson(schedulerActivitiesJson, FN_SCHEDULER_ACT_ROOT, FN_ACT_ALLOCATIONS), findQueueBPred); assertEquals(1, queueBObj.size()); assertEquals(ActivityState.REJECTED.name(), queueBObj.get(0).optString(FN_ACT_ALLOCATION_STATE)); 
assertEquals(ActivityDiagnosticConstant.QUEUE_DO_NOT_HAVE_ENOUGH_HEADROOM + " from " + am1.getApplicationAttemptId().getApplicationId(), queueBObj.get(0).optString(FN_ACT_DIAGNOSTIC)); // verify queue Qa Predicate<JSONObject> findQueueAPred = (obj) -> obj.optString(FN_SCHEDULER_ACT_NAME) .equals(CapacitySchedulerConfiguration.ROOT + DOT + QUEUE_A); List<JSONObject> queueAObj = ActivitiesTestUtils.findInAllocations( getFirstSubNodeFromJson(schedulerActivitiesJson, FN_SCHEDULER_ACT_ROOT, FN_ACT_ALLOCATIONS), findQueueAPred); assertEquals(1, queueAObj.size()); assertEquals(ActivityState.REJECTED.name(), queueAObj.get(0).optString(FN_ACT_ALLOCATION_STATE)); assertEquals( ActivityDiagnosticConstant.QUEUE_NOT_ABLE_TO_ACCESS_PARTITION, queueAObj.get(0).optString(FN_ACT_DIAGNOSTIC)); // verify queue Qc Predicate<JSONObject> findQueueCPred = (obj) -> obj.optString(FN_SCHEDULER_ACT_NAME) .equals(CapacitySchedulerConfiguration.ROOT + DOT + QUEUE_C); List<JSONObject> queueCObj = ActivitiesTestUtils.findInAllocations( getFirstSubNodeFromJson(schedulerActivitiesJson, FN_SCHEDULER_ACT_ROOT, FN_ACT_ALLOCATIONS), findQueueCPred); assertEquals(1, queueCObj.size()); assertEquals(ActivityState.SKIPPED.name(), queueCObj.get(0).optString(FN_ACT_ALLOCATION_STATE)); assertEquals(ActivityDiagnosticConstant.QUEUE_DO_NOT_NEED_MORE_RESOURCE, queueCObj.get(0).optString(FN_ACT_DIAGNOSTIC)); } private void verifySchedulerInfoXML(Document dom) throws Exception { NodeList scheduler = dom.getElementsByTagName("scheduler"); assertEquals(1, scheduler.getLength(), "incorrect number of elements"); NodeList schedulerInfo = dom.getElementsByTagName("schedulerInfo"); assertEquals(1, schedulerInfo.getLength(), "incorrect number of elements"); for (int i = 0; i < schedulerInfo.getLength(); i++) { Element element = (Element) schedulerInfo.item(i); NodeList children = element.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Element schedulerInfoElem = (Element) children.item(j); if 
(schedulerInfoElem.getTagName().equals("queues")) { NodeList qListInfos = schedulerInfoElem.getChildNodes(); for (int k = 0; k < qListInfos.getLength(); k++) { Element qElem2 = (Element) qListInfos.item(k); String queue = WebServicesTestUtils.getXmlString(qElem2, "queueName"); switch (queue) { case QUEUE_A: verifyQueueAInfoXML(qElem2); break; case QUEUE_B: verifyQueueBInfoXML(qElem2); break; case QUEUE_C: verifyQueueCInfoXML(qElem2); break; default: fail("Unexpected queue" + queue); } } } else if (schedulerInfoElem.getTagName().equals(CAPACITIES)) { NodeList capacitiesListInfos = schedulerInfoElem.getChildNodes(); assertEquals(3, capacitiesListInfos.getLength(), "incorrect number of partitions"); for (int k = 0; k < capacitiesListInfos.getLength(); k++) { Element partitionCapacitiesInfo = (Element) capacitiesListInfos.item(k); String partitionName = WebServicesTestUtils .getXmlString(partitionCapacitiesInfo, "partitionName"); assertTrue(CLUSTER_LABELS.contains(partitionName), "invalid PartitionCapacityInfo"); verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, 100, 0, 100, 100, 0, 100); } } } } } private void verifyQueueAInfoXML(Element queueElem) { NodeList children = queueElem.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Element queueChildElem = (Element) children.item(j); if (queueChildElem.getTagName().equals(CAPACITIES)) { NodeList capacitiesListInfos = queueChildElem.getChildNodes(); assertEquals(1, capacitiesListInfos.getLength(), "incorrect number of partitions"); Element partitionCapacitiesInfo = (Element) capacitiesListInfos.item(0); String partitionName = WebServicesTestUtils .getXmlString(partitionCapacitiesInfo, "partitionName"); assertTrue(partitionName.isEmpty(), "invalid PartitionCapacityInfo"); verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, 30, 0, 50, 30, 0, 50); } else if (queueChildElem.getTagName().equals("resources")) { verifyResourceUsageInfoXML(queueChildElem); } } } private void verifyQueueBInfoXML(Element 
queueElem) { assertEquals(LABEL_LX, WebServicesTestUtils.getXmlString(queueElem, "defaultNodeLabelExpression"), "Invalid default Label expression"); NodeList children = queueElem.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Element queueChildElem = (Element) children.item(j); if (queueChildElem.getTagName().equals(CAPACITIES)) { NodeList capacitiesListInfos = queueChildElem.getChildNodes(); assertEquals(2, capacitiesListInfos.getLength(), "incorrect number of partitions"); for (int k = 0; k < capacitiesListInfos.getLength(); k++) { Element partitionCapacitiesInfo = (Element) capacitiesListInfos.item(k); String partitionName = WebServicesTestUtils .getXmlString(partitionCapacitiesInfo, "partitionName"); switch (partitionName) { case LABEL_LX: verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, 30, 0, 100, 30, 0, 100); break; case DEFAULT_PARTITION: verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, 30, 0, 50, 30, 0, 50); break; default: fail("Unexpected partition" + partitionName); } } } } assertEquals(LABEL_LX, WebServicesTestUtils.getXmlString(queueElem, "nodeLabels"), "Node Labels are not matching"); } private void verifyQueueCInfoXML(Element queueElem) { NodeList children = queueElem.getChildNodes(); for (int j = 0; j < children.getLength(); j++) { Element queueChildElem = (Element) children.item(j); if (queueChildElem.getTagName().equals(CAPACITIES)) { verifyQcCapacitiesInfoXML(queueChildElem, 70, 100, 70, 100, 100, 100, 100, 100, 40, 50, 40, 50); } else if (queueChildElem.getTagName().equals("resources")) { verifyResourceUsageInfoXML(queueChildElem); } else if (queueChildElem.getTagName().equals("queues")) { NodeList qListInfos = queueChildElem.getChildNodes(); for (int k = 0; k < qListInfos.getLength(); k++) { Element qElem2 = (Element) qListInfos.item(k); String queue = WebServicesTestUtils.getXmlString(qElem2, "queueName"); switch (queue) { case LEAF_QUEUE_C1: assertEquals(LABEL_LX, WebServicesTestUtils.getXmlString(qElem2, 
"defaultNodeLabelExpression"), "Invalid default Label expression"); NodeList queuec1Children = qElem2.getChildNodes(); for (int l = 0; l < queuec1Children.getLength(); l++) { Element queueC1ChildElem = (Element) queuec1Children.item(l); if (queueC1ChildElem.getTagName().equals(CAPACITIES)) { verifyQcCapacitiesInfoXML(queueC1ChildElem, 40, 100, 28, 100, 50, 75, 50, 75, 50, 60, 20, 30); } } break; case LEAF_QUEUE_C2: assertEquals(LABEL_LY, WebServicesTestUtils.getXmlString(qElem2, "defaultNodeLabelExpression"), "Invalid default Label expression"); NodeList queuec2Children = qElem2.getChildNodes(); for (int l = 0; l < queuec2Children.getLength(); l++) { Element queueC2ChildElem = (Element) queuec2Children.item(l); if (queueC2ChildElem.getTagName().equals(CAPACITIES)) { verifyQcCapacitiesInfoXML(queueC2ChildElem, 60, 100, 42, 100, 50, 75, 50, 75, 50, 70, 20, 35); } } break; default: fail("Unexpected queue" + queue); } } } } } private void verifyQcCapacitiesInfoXML(Element partitionCapacitiesElem, float lxCaps, float lxMaxCaps, float lxAbsCaps, float lxAbsMaxCaps, float lyCaps, float lyMaxCaps, float lyAbsCaps, float lyAbsMaxCaps, float defCaps, float defMaxCaps, float defAbsCaps, float defAbsMaxCaps) { NodeList capacitiesListInfos = partitionCapacitiesElem.getChildNodes(); assertEquals(3, capacitiesListInfos.getLength(), "incorrect number of partitions"); for (int k = 0; k < capacitiesListInfos.getLength(); k++) { Element partitionCapacitiesInfo = (Element) capacitiesListInfos.item(k); String partitionName = WebServicesTestUtils .getXmlString(partitionCapacitiesInfo, "partitionName"); switch (partitionName) { case LABEL_LX: verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, lxCaps, 0, lxMaxCaps, lxAbsCaps, 0, lxAbsMaxCaps); break; case LABEL_LY: verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, lyCaps, 0, lyMaxCaps, lyAbsCaps, 0, lyAbsMaxCaps); break; case DEFAULT_PARTITION: verifyPartitionCapacityInfoXML(partitionCapacitiesInfo, defCaps, 0, defMaxCaps, 
defAbsCaps, 0, defAbsMaxCaps); break; default: fail("Unexpected partition" + partitionName); } } } private void verifyResourceUsageInfoXML(Element queueChildElem) { NodeList resourceUsageInfo = queueChildElem.getChildNodes(); assertEquals(1, resourceUsageInfo.getLength(), "incorrect number of partitions"); Element partitionResourceUsageInfo = (Element) resourceUsageInfo.item(0); String partitionName = WebServicesTestUtils .getXmlString(partitionResourceUsageInfo, "partitionName"); assertTrue(DEFAULT_PARTITION.equals(partitionName), "invalid PartitionCapacityInfo"); } private void verifyPartitionCapacityInfoXML(Element partitionInfo, float capacity, float usedCapacity, float maxCapacity, float absoluteCapacity, float absoluteUsedCapacity, float absoluteMaxCapacity) { assertEquals(capacity, WebServicesTestUtils.getXmlFloat(partitionInfo, "capacity"), EPSILON, "capacity doesn't match"); assertEquals(usedCapacity, WebServicesTestUtils.getXmlFloat(partitionInfo, "usedCapacity"), EPSILON, "capacity doesn't match"); assertEquals(maxCapacity, WebServicesTestUtils.getXmlFloat(partitionInfo, "maxCapacity"), EPSILON, "capacity doesn't match"); assertEquals(absoluteCapacity, WebServicesTestUtils.getXmlFloat(partitionInfo, "absoluteCapacity"), EPSILON, "capacity doesn't match"); assertEquals(absoluteUsedCapacity, WebServicesTestUtils.getXmlFloat(partitionInfo, "absoluteUsedCapacity"), EPSILON, "capacity doesn't match"); assertEquals(absoluteMaxCapacity, WebServicesTestUtils.getXmlFloat(partitionInfo, "absoluteMaxCapacity"), EPSILON, "capacity doesn't match"); } private void verifySchedulerInfoJson(JSONObject json) throws JSONException, Exception { assertEquals(1, json.length(), "incorrect number of elements"); JSONObject info = json.getJSONObject("scheduler"); assertEquals(1, info.length(), "incorrect number of elements"); info = info.getJSONObject("schedulerInfo"); assertEquals(25, info.length(), "incorrect number of elements"); JSONObject capacitiesJsonObject = 
info.getJSONObject(CAPACITIES); JSONArray partitionsCapsArray = capacitiesJsonObject.getJSONArray(QUEUE_CAPACITIES_BY_PARTITION); assertEquals(CLUSTER_LABELS.size(), partitionsCapsArray.length(), "incorrect number of elements"); for (int i = 0; i < partitionsCapsArray.length(); i++) { JSONObject partitionInfo = partitionsCapsArray.getJSONObject(i); String partitionName = partitionInfo.getString("partitionName"); assertTrue(CLUSTER_LABELS.contains(partitionName), "Unknown partition received"); verifyPartitionCapacityInfoJson(partitionInfo, 100, 0, 100, 100, 0, 100); } JSONObject jsonQueuesObject = info.getJSONObject("queues"); JSONArray queuesArray = jsonQueuesObject.getJSONArray("queue"); for (int i = 0; i < queuesArray.length(); i++) { JSONObject queueJson = queuesArray.getJSONObject(i); String queue = queueJson.getString("queueName"); Object resourceUsagesByPartition = queueJson.getJSONObject("resources"). get(RESOURCE_USAGES_BY_PARTITION); JSONArray resourceUsageByPartition = new JSONArray(); if (resourceUsagesByPartition instanceof JSONArray) { resourceUsageByPartition = JSONArray.class.cast(resourceUsagesByPartition); } else { resourceUsageByPartition.put(JSONObject.class.cast(resourceUsagesByPartition)); } JSONObject resourcesJsonObject = queueJson.getJSONObject("resources"); Object resourceUsagesByPartition2 = resourcesJsonObject. get(RESOURCE_USAGES_BY_PARTITION); JSONArray partitionsResourcesArray = new JSONArray(); if (resourceUsagesByPartition2 instanceof JSONArray) { partitionsResourcesArray = JSONArray.class.cast(resourceUsagesByPartition2); } else { partitionsResourcesArray.put(resourceUsagesByPartition2); } capacitiesJsonObject = queueJson.getJSONObject(CAPACITIES); Object queueCapacitiesByPartition = capacitiesJsonObject. 
get(QUEUE_CAPACITIES_BY_PARTITION); partitionsCapsArray = new JSONArray(); if (queueCapacitiesByPartition instanceof JSONArray) { partitionsCapsArray = JSONArray.class.cast(queueCapacitiesByPartition); } else { partitionsCapsArray.put(queueCapacitiesByPartition); } JSONObject partitionInfo = null; String partitionName = null; switch (queue) { case QUEUE_A: assertEquals(1, partitionsCapsArray.length(), "incorrect number of partitions"); partitionInfo = partitionsCapsArray.getJSONObject(0); partitionName = partitionInfo.getString("partitionName"); verifyPartitionCapacityInfoJson(partitionInfo, 30, 0, 50, 30, 0, 50); assertEquals(7, partitionsResourcesArray.getJSONObject(0).length(), "incorrect number of elements"); assertEquals(1, resourceUsageByPartition.length(), "incorrect number of objects"); break; case QUEUE_B: assertEquals(LABEL_LX, queueJson.getString("defaultNodeLabelExpression"), "Invalid default Label expression"); assertEquals(7, partitionsResourcesArray.getJSONObject(0).length(), "incorrect number of elements"); verifyAccesibleNodeLabels(queueJson, ImmutableSet.of(LABEL_LX)); assertEquals(2, partitionsCapsArray.length(), "incorrect number of partitions"); assertEquals(2, resourceUsageByPartition.length(), "incorrect number of objects"); for (int j = 0; j < partitionsCapsArray.length(); j++) { partitionInfo = partitionsCapsArray.getJSONObject(j); partitionName = partitionInfo.getString("partitionName"); switch (partitionName) { case LABEL_LX: verifyPartitionCapacityInfoJson(partitionInfo, 30, 0, 100, 30, 0, 100); break; case DEFAULT_PARTITION: verifyPartitionCapacityInfoJson(partitionInfo, 30, 0, 50, 30, 0, 50); break; default: fail("Unexpected partition" + partitionName); } } break; case QUEUE_C: verifyAccesibleNodeLabels(queueJson, ImmutableSet.of(LABEL_LX, LABEL_LY)); assertEquals(4, partitionsResourcesArray.getJSONObject(0).length(), "incorrect number of elements"); verifyQcPartitionsCapacityInfoJson(partitionsCapsArray, 70, 100, 70, 100, 100, 100, 
100, 100, 40, 50, 40, 50); verifySubQueuesOfQc(queueJson); break; default: fail("Unexpected queue" + queue); } } } private void verifyAccesibleNodeLabels(JSONObject queueJson, Set<String> accesibleNodeLabels) throws JSONException { Object nodeLabelsObj = queueJson.get("nodeLabels"); JSONArray nodeLabels = new JSONArray(); if(nodeLabelsObj instanceof JSONArray){ nodeLabels = queueJson.getJSONArray("nodeLabels"); } else { nodeLabels.put(nodeLabelsObj); } assertEquals(accesibleNodeLabels.size(), nodeLabels.length(), "number of accessible Node Labels not matching"); for (int i = 0; i < nodeLabels.length(); i++) { assertTrue(accesibleNodeLabels.contains(nodeLabels.getString(i)), "Invalid accessible node label : " + nodeLabels.getString(i)); } } private void verifySubQueuesOfQc(JSONObject queueCJson) throws JSONException { JSONObject jsonQueuesObject = queueCJson.getJSONObject("queues"); JSONArray queuesArray = jsonQueuesObject.getJSONArray("queue"); for (int i = 0; i < queuesArray.length(); i++) { JSONObject queueJson = queuesArray.getJSONObject(i); String queue = queueJson.getString("queueName"); JSONObject capacitiesJsonObject = queueJson.getJSONObject(CAPACITIES); JSONArray partitionsCapsArray = capacitiesJsonObject.getJSONArray(QUEUE_CAPACITIES_BY_PARTITION); switch (queue) { case LEAF_QUEUE_C1: verifyAccesibleNodeLabels(queueJson, ImmutableSet.of(LABEL_LX, LABEL_LY)); assertEquals(LABEL_LX, queueJson.getString("defaultNodeLabelExpression"), "Invalid default Label expression"); verifyQcPartitionsCapacityInfoJson(partitionsCapsArray, 40, 100, 28, 100, 50, 75, 50, 75, 50, 60, 20, 30); break; case LEAF_QUEUE_C2: verifyAccesibleNodeLabels(queueJson, ImmutableSet.of(LABEL_LX, LABEL_LY)); assertEquals(LABEL_LY, queueJson.getString("defaultNodeLabelExpression"), "Invalid default Label expression"); verifyQcPartitionsCapacityInfoJson(partitionsCapsArray, 60, 100, 42, 100, 50, 75, 50, 75, 50, 70, 20, 35); break; default: fail("Unexpected queue" + queue); } } } private void 
verifyQcPartitionsCapacityInfoJson(JSONArray partitionsCapsArray, float lxCaps, float lxMaxCaps, float lxAbsCaps, float lxAbsMaxCaps, float lyCaps, float lyMaxCaps, float lyAbsCaps, float lyAbsMaxCaps, float defCaps, float defMaxCaps, float defAbsCaps, float defAbsMaxCaps) throws JSONException { assertEquals(CLUSTER_LABELS.size(), partitionsCapsArray.length(), "incorrect number of partitions"); for (int j = 0; j < partitionsCapsArray.length(); j++) { JSONObject partitionInfo = partitionsCapsArray.getJSONObject(j); String partitionName = partitionInfo.getString("partitionName"); switch (partitionName) { case LABEL_LX: verifyPartitionCapacityInfoJson(partitionInfo, lxCaps, 0, lxMaxCaps, lxAbsCaps, 0, lxAbsMaxCaps); break; case LABEL_LY: verifyPartitionCapacityInfoJson(partitionInfo, lyCaps, 0, lyMaxCaps, lyAbsCaps, 0, lyAbsMaxCaps); break; case DEFAULT_PARTITION: verifyPartitionCapacityInfoJson(partitionInfo, defCaps, 0, defMaxCaps, defAbsCaps, 0, defAbsMaxCaps); break; default: fail("Unexpected partition" + partitionName); } } } private void verifyPartitionCapacityInfoJson( JSONObject partitionCapacityInfoJson, float capacity, float usedCapacity, float maxCapacity, float absoluteCapacity, float absoluteUsedCapacity, float absoluteMaxCapacity) throws JSONException { assertEquals(capacity, (float) partitionCapacityInfoJson.getDouble("capacity"), EPSILON, "capacity doesn't match"); assertEquals(usedCapacity, (float) partitionCapacityInfoJson.getDouble("usedCapacity"), EPSILON, "capacity doesn't match"); assertEquals(maxCapacity, (float) partitionCapacityInfoJson.getDouble("maxCapacity"), EPSILON, "capacity doesn't match"); assertEquals(absoluteCapacity, (float) partitionCapacityInfoJson.getDouble("absoluteCapacity"), EPSILON, "capacity doesn't match"); assertEquals(absoluteUsedCapacity, (float) partitionCapacityInfoJson.getDouble("absoluteUsedCapacity"), 1e-3f, "capacity doesn't match"); assertEquals(absoluteMaxCapacity, (float) 
partitionCapacityInfoJson.getDouble("absoluteMaxCapacity"), 1e-3f, "capacity doesn't match"); } }
apache/jackrabbit-oak
38,093
oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/util/UtilsTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document.util; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Random; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.codec.binary.Hex; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.commons.collections.IterableUtils; import org.apache.jackrabbit.oak.commons.junit.LogCustomizer; import org.apache.jackrabbit.oak.plugins.document.ClusterNodeInfo; import org.apache.jackrabbit.oak.plugins.document.ClusterNodeInfoDocument; import org.apache.jackrabbit.oak.plugins.document.Collection; import org.apache.jackrabbit.oak.plugins.document.DocumentMK; import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBuilder; import org.apache.jackrabbit.oak.plugins.document.DocumentStore; import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException; import org.apache.jackrabbit.oak.plugins.document.NodeDocument; import org.apache.jackrabbit.oak.plugins.document.Path; import 
org.apache.jackrabbit.oak.plugins.document.Revision; import org.apache.jackrabbit.oak.plugins.document.RevisionVector; import org.apache.jackrabbit.oak.plugins.document.UpdateOp; import org.apache.jackrabbit.oak.plugins.document.UpdateUtils; import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.toggle.Feature; import org.apache.jackrabbit.oak.stats.Clock; import org.jetbrains.annotations.Nullable; import org.junit.Ignore; import org.junit.Test; import org.mockito.Mockito; import org.slf4j.event.Level; import static org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBuilder.DEFAULT_MEMORY_CACHE_SIZE; import static org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBuilder.DEFAULT_PREV_NO_PROP_CACHE_PERCENTAGE; import static org.apache.jackrabbit.oak.plugins.document.DocumentNodeStoreBuilder.newDocumentNodeStoreBuilder; import static org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentNodeStoreBuilder.newRDBDocumentNodeStoreBuilder; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isAvoidMergeLockEnabled; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isFullGCEnabled; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isEmbeddedVerificationEnabled; import static org.apache.jackrabbit.oak.plugins.document.util.Utils.isThrottlingEnabled; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.lessThan; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import 
static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * Tests for {@link Utils}. */ public class UtilsTest { private static final long TIME_DIFF_WARN_THRESHOLD_MILLIS = 2000L; private static final String LONG_PATH = "/foo/barbar/qwerty/asdfgh/zxcvbnm/adfsuyhdgjebuuuuuuupcccccccccsdb123ceeeeeeeeeeideaallthe_rifbdjhhbgksdfdght_acbsajbvcfjdnswersfb_dvhffbjrhbfhjdbfjsideacentrefgduyfwerebhjvbrhuv_fbhefhsbjasbka/adfsuyhdgjebuuuuuuupcccccccccsdb123ceeeeeeeeeeideaallthe_rifbdjhhbgksdfdght_acbsajbvcfjdnswersfb_dvhffbjrhbfhjdbfjsideacentrefgduyfwerebhjvbrhuv_fbhefhsbjasbka/adfsuyhdgjebuuuuuuupcccccccccsdb123ceeeeeeeeeeideaallthe_rifbdjhhbgksdfdght_acbsajbvcfjdnswersfb_dvhffbjrhbfhjdbfjsideacentrefgduyfwerebhjvbrhuv_fbhefhsbjasbka"; @Test public void getPreviousIdFor() { Revision r = new Revision(System.currentTimeMillis(), 0, 0); assertEquals("2:p/" + r.toString() + "/0", Utils.getPreviousIdFor(Path.ROOT, r, 0)); assertEquals("3:p/test/" + r.toString() + "/1", Utils.getPreviousIdFor(Path.fromString("/test"), r, 1)); assertEquals("15:p/a/b/c/d/e/f/g/h/i/j/k/l/m/" + r.toString() + "/3", Utils.getPreviousIdFor(Path.fromString("/a/b/c/d/e/f/g/h/i/j/k/l/m"), r, 3)); } @Test public void previousDoc() throws Exception{ Revision r = new Revision(System.currentTimeMillis(), 0, 0); assertTrue(Utils.isPreviousDocId(Utils.getPreviousIdFor(Path.ROOT, r, 0))); assertTrue(Utils.isPreviousDocId(Utils.getPreviousIdFor(Path.fromString("/a/b/c/d/e/f/g/h/i/j/k/l/m"), r, 3))); assertFalse(Utils.isPreviousDocId(Utils.getIdFromPath("/a/b"))); assertFalse(Utils.isPreviousDocId("foo")); assertFalse(Utils.isPreviousDocId("0:")); } @Test public void leafPreviousDoc() throws Exception { Revision r = new Revision(System.currentTimeMillis(), 0, 0); assertTrue(Utils.isLeafPreviousDocId(Utils.getPreviousIdFor(Path.ROOT, 
r, 0))); assertTrue(Utils.isLeafPreviousDocId(Utils.getPreviousIdFor(Path.fromString("/a/b/c/d/e/f/g/h/i/j/k/l/m"), r, 0))); assertFalse(Utils.isLeafPreviousDocId(Utils.getPreviousIdFor(Path.fromString("/a/b/c/d/e/f/g/h/i/j/k/l/m"), r, 3))); assertFalse(Utils.isLeafPreviousDocId(Utils.getIdFromPath("/a/b"))); assertFalse(Utils.isLeafPreviousDocId("foo")); assertFalse(Utils.isLeafPreviousDocId("0:")); assertFalse(Utils.isLeafPreviousDocId(":/0")); } @Test public void getParentIdFromLowerLimit() throws Exception{ assertEquals("1:/foo",Utils.getParentIdFromLowerLimit(Utils.getKeyLowerLimit(Path.fromString("/foo")))); assertEquals("1:/foo",Utils.getParentIdFromLowerLimit("2:/foo/bar")); } @Test public void getParentId() throws Exception{ Path longPath = Path.fromString(PathUtils.concat("/" + "p".repeat(Utils.PATH_LONG + 1), "foo")); assertTrue(Utils.isLongPath(longPath)); assertNull(Utils.getParentId(Utils.getIdFromPath(longPath))); assertNull(Utils.getParentId(Utils.getIdFromPath(Path.ROOT))); assertEquals("1:/foo", Utils.getParentId("2:/foo/bar")); } @Test public void getParentIdForLongPathWithNodeNameLimit() { Path longPath = Path.fromString(LONG_PATH); assertTrue(Utils.isNodeNameLong(longPath, Utils.NODE_NAME_LIMIT)); } @Test public void getParentIdForLongPathWithoutNodeNameLimit() { Path longPath = Path.fromString(LONG_PATH); assertFalse(Utils.isNodeNameLong(longPath, Integer.MAX_VALUE)); assertNull(Utils.getParentId(Utils.getIdFromPath(longPath))); } @Test public void throttlingEnabledDefaultValue() { boolean throttlingEnabled = isThrottlingEnabled(newDocumentNodeStoreBuilder()); assertFalse("Throttling is disabled by default", throttlingEnabled); } @Test public void throttlingExplicitlyDisabled() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setThrottlingEnabled(false); Feature docStoreThrottlingFeature = mock(Feature.class); when(docStoreThrottlingFeature.isEnabled()).thenReturn(false); 
builder.setDocStoreThrottlingFeature(docStoreThrottlingFeature); boolean throttlingEnabled = isThrottlingEnabled(builder); assertFalse("Throttling is disabled explicitly", throttlingEnabled); } @Test public void throttlingEnabledViaConfiguration() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setThrottlingEnabled(true); Feature docStoreThrottlingFeature = mock(Feature.class); when(docStoreThrottlingFeature.isEnabled()).thenReturn(false); builder.setDocStoreThrottlingFeature(docStoreThrottlingFeature); boolean throttlingEnabled = isThrottlingEnabled(builder); assertTrue("Throttling is enabled via configuration", throttlingEnabled); } @Test public void throttlingEnabledViaFeatureToggle() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setThrottlingEnabled(false); Feature docStoreThrottlingFeature = mock(Feature.class); when(docStoreThrottlingFeature.isEnabled()).thenReturn(true); builder.setDocStoreThrottlingFeature(docStoreThrottlingFeature); boolean throttlingEnabled = isThrottlingEnabled(builder); assertTrue("Throttling is enabled via Feature Toggle", throttlingEnabled); } @Test public void fullGCEnabledDefaultValue() { boolean fullGCEnabled = isFullGCEnabled(newDocumentNodeStoreBuilder()); assertFalse("Full GC is disabled by default", fullGCEnabled); } @Test public void fullGCExplicitlyDisabled() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setFullGCEnabled(false); Feature docStoreFullGCFeature = mock(Feature.class); when(docStoreFullGCFeature.isEnabled()).thenReturn(false); builder.setDocStoreFullGCFeature(docStoreFullGCFeature); boolean fullGCEnabled = isFullGCEnabled(builder); assertFalse("Full GC is disabled explicitly", fullGCEnabled); } @Test public void fullGCEnabledViaConfiguration() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setFullGCEnabled(true); Feature docStoreFullGCFeature = mock(Feature.class); 
when(docStoreFullGCFeature.isEnabled()).thenReturn(false); builder.setDocStoreFullGCFeature(docStoreFullGCFeature); boolean fullGCEnabled = isFullGCEnabled(builder); assertTrue("Full GC is enabled via configuration", fullGCEnabled); } @Test public void fullGCEnabledViaFeatureToggle() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setFullGCEnabled(false); Feature docStoreFullGCFeature = mock(Feature.class); when(docStoreFullGCFeature.isEnabled()).thenReturn(true); builder.setDocStoreFullGCFeature(docStoreFullGCFeature); boolean fullGCEnabled = isFullGCEnabled(builder); assertTrue("Full GC is enabled via Feature Toggle", fullGCEnabled); } @Test public void fullGCDisabledForRDB() { DocumentNodeStoreBuilder<?> builder = newRDBDocumentNodeStoreBuilder(); builder.setFullGCEnabled(true); Feature docStoreFullGCFeature = mock(Feature.class); when(docStoreFullGCFeature.isEnabled()).thenReturn(true); builder.setDocStoreFullGCFeature(docStoreFullGCFeature); boolean fullGCEnabled = isFullGCEnabled(builder); assertFalse("Full GC is disabled for RDB Document Store", fullGCEnabled); } @Test public void avoidMergeLockEnabledDefaultValue() { boolean avoidMergeLockEnabled = isAvoidMergeLockEnabled(newDocumentNodeStoreBuilder()); assertFalse("Avoid Merge Lock is enabled by default", avoidMergeLockEnabled); } @Test public void avoidMergeLockExplicitlyDisabled() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setAvoidMergeLock(false); Feature docStoreAvoidMergeLockFeature = mock(Feature.class); when(docStoreAvoidMergeLockFeature.isEnabled()).thenReturn(false); builder.setDocStoreAvoidMergeLockFeature(docStoreAvoidMergeLockFeature); boolean avoidMergeLockEnabled = isAvoidMergeLockEnabled(builder); assertFalse("Avoid Merge Lock is disabled explicitly", avoidMergeLockEnabled); } @Test public void avoidMergeLockEnabledViaConfiguration() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); 
builder.setAvoidMergeLock(true); Feature docStoreAvoidMergeLockFeature = mock(Feature.class); when(docStoreAvoidMergeLockFeature.isEnabled()).thenReturn(false); builder.setDocStoreAvoidMergeLockFeature(docStoreAvoidMergeLockFeature); boolean avoidMergeLockEnabled = isAvoidMergeLockEnabled(builder); assertTrue("Avoid Merge Lock is enabled via configuration", avoidMergeLockEnabled); } @Test public void avoidMergeLockEnabledViaFeatureToggle() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setAvoidMergeLock(false); Feature docStoreAvoidMergeLockFeature = mock(Feature.class); when(docStoreAvoidMergeLockFeature.isEnabled()).thenReturn(true); builder.setDocStoreAvoidMergeLockFeature(docStoreAvoidMergeLockFeature); boolean avoidMergeLockEnabled = isAvoidMergeLockEnabled(builder); assertTrue("Avoid Merge Lock is enabled via Feature Toggle", avoidMergeLockEnabled); } @Test public void avoidMergeLockDisabledForRDB() { DocumentNodeStoreBuilder<?> builder = newRDBDocumentNodeStoreBuilder(); builder.setAvoidMergeLock(true); Feature docStoreAvoidMergeLockFeature = mock(Feature.class); when(docStoreAvoidMergeLockFeature.isEnabled()).thenReturn(true); builder.setDocStoreAvoidMergeLockFeature(docStoreAvoidMergeLockFeature); boolean avoidMergeLockEnabled = isAvoidMergeLockEnabled(builder); assertFalse("Avoid Merge Lock is enabled for RDB Document Store", avoidMergeLockEnabled); } @Test public void fullGCModeDefaultValue() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); int fullGCModeDefaultValue = builder.getFullGCMode(); final int fullGcModeNone = 0; assertEquals("Full GC mode has NONE value by default", fullGcModeNone, fullGCModeDefaultValue); } @Test public void fullGCGenerationDefaultValue() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); long fullGCGenerationDefaultValue = builder.getFullGCGeneration(); final long fullGcgeneration = 0; assertEquals("Full GC generation has 0 value by default", 
fullGcgeneration, fullGCGenerationDefaultValue); } @Test public void fullGCModeSetViaConfiguration() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); final int fullGcModeGapOrphans = 2; builder.setFullGCMode(fullGcModeGapOrphans); int fullGCModeValue = builder.getFullGCMode(); assertEquals("Full GC mode set correctly via configuration", fullGcModeGapOrphans, fullGCModeValue); } @Test public void fullGCGenerationSetViaConfiguration() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); final long fullGcGeneration = 2; builder.setFullGCGeneration(fullGcGeneration); long fullGCGenerationValue = builder.getFullGCGeneration(); assertEquals("Full GC generation set correctly via configuration", fullGcGeneration, fullGCGenerationValue); } @Test public void fullGCModeHasDefaultValueForRDB() { DocumentNodeStoreBuilder<?> builder = newRDBDocumentNodeStoreBuilder(); builder.setFullGCMode(3); int fullGCModeValue = builder.getFullGCMode(); assertEquals("Full GC mode has default value 0 for RDB Document Store", 0, fullGCModeValue); } @Test public void fullGCGenerationHasDefaultValueForRDB() { DocumentNodeStoreBuilder<?> builder = newRDBDocumentNodeStoreBuilder(); builder.setFullGCGeneration(3); long fullGCGenerationValue = builder.getFullGCGeneration(); assertEquals("Full GC generation has default value 0 for RDB Document Store", 0, fullGCGenerationValue); } @Test public void embeddedVerificationEnabledDefaultValue() { boolean embeddedVerificationEnabled = isEmbeddedVerificationEnabled(newDocumentNodeStoreBuilder()); assertTrue("Embedded Verification is enabled by default", embeddedVerificationEnabled); } @Test public void embeddedVerificationExplicitlyDisabled() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setEmbeddedVerificationEnabled(false); Feature docStoreEmbeddedVerificationFeature = mock(Feature.class); when(docStoreEmbeddedVerificationFeature.isEnabled()).thenReturn(false); 
builder.setDocStoreEmbeddedVerificationFeature(docStoreEmbeddedVerificationFeature); boolean embeddedVerificationEnabled = isEmbeddedVerificationEnabled(builder); assertFalse("Embedded Verification is disabled explicitly", embeddedVerificationEnabled); } @Test public void embeddedVerificationEnabledViaConfiguration() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setEmbeddedVerificationEnabled(true); Feature docStoreEmbeddedVerificationFeature = mock(Feature.class); when(docStoreEmbeddedVerificationFeature.isEnabled()).thenReturn(false); builder.setDocStoreEmbeddedVerificationFeature(docStoreEmbeddedVerificationFeature); boolean embeddedVerificationEnabled = isEmbeddedVerificationEnabled(builder); assertTrue("Embedded Verification is enabled via configuration", embeddedVerificationEnabled); } @Test public void embeddedVerificationEnabledViaFeatureToggle() { DocumentNodeStoreBuilder<?> builder = newDocumentNodeStoreBuilder(); builder.setEmbeddedVerificationEnabled(false); Feature docStoreEmbeddedVerificationFeature = mock(Feature.class); when(docStoreEmbeddedVerificationFeature.isEnabled()).thenReturn(true); builder.setDocStoreEmbeddedVerificationFeature(docStoreEmbeddedVerificationFeature); boolean embeddedVerificationEnabled = isEmbeddedVerificationEnabled(builder); assertTrue("Embedded Verification is enabled via Feature Toggle", embeddedVerificationEnabled); } @Test public void embeddedVerificationDisabledForRDB() { DocumentNodeStoreBuilder<?> builder = newRDBDocumentNodeStoreBuilder(); builder.setEmbeddedVerificationEnabled(true); Feature docStoreEmbeddedVerificationFeature = mock(Feature.class); when(docStoreEmbeddedVerificationFeature.isEnabled()).thenReturn(true); builder.setDocStoreEmbeddedVerificationFeature(docStoreEmbeddedVerificationFeature); boolean embeddedVerificationEnabled = isEmbeddedVerificationEnabled(builder); assertFalse("Embedded Verification is disabled for RDB Document Store", embeddedVerificationEnabled); } 
@Test public void prevNoPropDisabledByDefault() { assertPrevNoPropDisabled(newDocumentNodeStoreBuilder()); } @Test public void prevNoPropDisabled() { assertPrevNoPropDisabled(newDocumentNodeStoreBuilder() .setPrevNoPropCacheFeature(createFeature(false))); } private void assertPrevNoPropDisabled(DocumentNodeStoreBuilder<?> builder) { assertNotNull(builder); @Nullable Feature feature = builder.getPrevNoPropCacheFeature(); if (feature != null) { assertFalse(feature.isEnabled()); } assertEquals(0, builder.getPrevNoPropCacheSize()); assertNull(builder.buildPrevNoPropCache()); } @Test public void prevNoPropEnabled() { DocumentNodeStoreBuilder<?> b = newDocumentNodeStoreBuilder().setPrevNoPropCacheFeature(createFeature(true)); assertNotNull(b); assertTrue(b.getPrevNoPropCacheFeature().isEnabled()); assertEquals(DEFAULT_MEMORY_CACHE_SIZE * DEFAULT_PREV_NO_PROP_CACHE_PERCENTAGE / 100, b.getPrevNoPropCacheSize()); assertNotNull(b.buildPrevNoPropCache()); } public static Feature createFeature(boolean enabled) { Feature f = mock(Feature.class); when(f.isEnabled()).thenReturn(enabled); return f; } @Test public void getDepthFromId() throws Exception{ assertEquals(1, Utils.getDepthFromId("1:/x")); assertEquals(2, Utils.getDepthFromId("2:/x")); assertEquals(10, Utils.getDepthFromId("10:/x")); } @Ignore("Performance test") @Test public void performance_getPreviousIdFor() { Revision r = new Revision(System.currentTimeMillis(), 0, 0); Path path = Path.fromString("/some/test/path/foo"); // warm up for (int i = 0; i < 1 * 1000 * 1000; i++) { Utils.getPreviousIdFor(path, r, 0); } long time = System.currentTimeMillis(); for (int i = 0; i < 10 * 1000 * 1000; i++) { Utils.getPreviousIdFor(path, r, 0); } time = System.currentTimeMillis() - time; System.out.println(time); } @Ignore("Performance test") @Test public void performance_revisionToString() { for (int i = 0; i < 4; i++) { performance_revisionToStringOne(); } } private static void performance_revisionToStringOne() { Revision r = new 
Revision(System.currentTimeMillis(), 0, 0); int dummy = 0; long time = System.currentTimeMillis(); for (int i = 0; i < 30 * 1000 * 1000; i++) { dummy += r.toString().length(); } time = System.currentTimeMillis() - time; System.out.println("time: " + time + " dummy " + dummy); } @Test public void max() { Revision a = new Revision(42, 0, 1); Revision b = new Revision(43, 0, 1); assertSame(b, Utils.max(a, b)); Revision a1 = new Revision(42, 1, 1); assertSame(a1, Utils.max(a, a1)); assertSame(a, Utils.max(a, null)); assertSame(a, Utils.max(null, a)); assertNull(Utils.max(null, null)); } @Test public void min() { Revision a = new Revision(42, 1, 1); Revision b = new Revision(43, 0, 1); assertSame(a, Utils.min(a, b)); Revision a1 = new Revision(42, 0, 1); assertSame(a1, Utils.min(a, a1)); assertSame(a, Utils.min(a, null)); assertSame(a, Utils.min(null, a)); assertNull(Utils.max(null, null)); } @Test public void getAllDocuments() throws CommitFailedException { DocumentNodeStore store = new DocumentMK.Builder().getNodeStore(); try { NodeBuilder builder = store.getRoot().builder(); for (int i = 0; i < 1000; i++) { builder.child("test-" + i); } store.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY); assertEquals(1001 /* root + 1000 children */, IterableUtils.size( Utils.getAllDocuments(store.getDocumentStore()))); } finally { store.dispose(); } } @Test public void getMaxExternalRevisionTime() { int localClusterId = 1; List<Revision> revs = Collections.emptyList(); long revTime = Utils.getMaxExternalTimestamp(revs, localClusterId); assertEquals(Long.MIN_VALUE, revTime); revs = List.of(Revision.fromString("r1-0-1")); revTime = Utils.getMaxExternalTimestamp(revs, localClusterId); assertEquals(Long.MIN_VALUE, revTime); revs = List.of( Revision.fromString("r1-0-1"), Revision.fromString("r2-0-2")); revTime = Utils.getMaxExternalTimestamp(revs, localClusterId); assertEquals(2, revTime); revs = List.of( Revision.fromString("r3-0-1"), Revision.fromString("r2-0-2")); revTime = 
Utils.getMaxExternalTimestamp(revs, localClusterId); assertEquals(2, revTime); revs = List.of( Revision.fromString("r1-0-1"), Revision.fromString("r2-0-2"), Revision.fromString("r2-0-3")); revTime = Utils.getMaxExternalTimestamp(revs, localClusterId); assertEquals(2, revTime); revs = List.of( Revision.fromString("r1-0-1"), Revision.fromString("r3-0-2"), Revision.fromString("r2-0-3")); revTime = Utils.getMaxExternalTimestamp(revs, localClusterId); assertEquals(3, revTime); } @Test public void getMinTimestampForDiff() { RevisionVector from = new RevisionVector(new Revision(17, 0, 1)); RevisionVector to = new RevisionVector(new Revision(19, 0, 1)); assertEquals(17, Utils.getMinTimestampForDiff(from, to, new RevisionVector())); assertEquals(17, Utils.getMinTimestampForDiff(to, from, new RevisionVector())); RevisionVector minRevs = new RevisionVector( new Revision(7, 0, 1), new Revision(4, 0, 2)); assertEquals(17, Utils.getMinTimestampForDiff(from, to, minRevs)); assertEquals(17, Utils.getMinTimestampForDiff(to, from, minRevs)); to = to.update(new Revision(15, 0, 2)); // must return min revision of clusterId 2 assertEquals(4, Utils.getMinTimestampForDiff(from, to, minRevs)); assertEquals(4, Utils.getMinTimestampForDiff(to, from, minRevs)); } @Test(expected = IllegalArgumentException.class) public void getDepthFromIdIllegalArgumentException1() { Utils.getDepthFromId("a:/foo"); } @Test(expected = IllegalArgumentException.class) public void getDepthFromIdIllegalArgumentException2() { Utils.getDepthFromId("42"); } @Test public void alignWithExternalRevisions() throws Exception { Clock c = new Clock.Virtual(); c.waitUntil(System.currentTimeMillis()); // past Revision lastRev1 = new Revision(c.getTime() - 1000, 0, 1); // future Revision lastRev2 = new Revision(c.getTime() + 1000, 0, 2); // create a root document NodeDocument doc = new NodeDocument(new MemoryDocumentStore(), c.getTime()); UpdateOp op = new UpdateOp(Utils.getIdFromPath("/"), true); NodeDocument.setLastRev(op, 
lastRev1); NodeDocument.setLastRev(op, lastRev2); UpdateUtils.applyChanges(doc, op); // must not wait even if revision is in the future Utils.alignWithExternalRevisions(doc, c, 2, TIME_DIFF_WARN_THRESHOLD_MILLIS); assertThat(c.getTime(), is(lessThan(lastRev2.getTimestamp()))); // must wait until after lastRev2 timestamp Utils.alignWithExternalRevisions(doc, c, 1, TIME_DIFF_WARN_THRESHOLD_MILLIS); assertThat(c.getTime(), is(greaterThan(lastRev2.getTimestamp()))); } @Test public void warnOnClockDifferences() throws Exception { Clock c = new Clock.Virtual(); c.waitUntil(System.currentTimeMillis()); // local Revision lastRev1 = new Revision(c.getTime(), 0, 1); // other in the future Revision lastRev2 = new Revision(c.getTime() + 5000, 0, 2); // create a root document NodeDocument doc = new NodeDocument(new MemoryDocumentStore(), c.getTime()); UpdateOp op = new UpdateOp(Utils.getIdFromPath("/"), true); NodeDocument.setLastRev(op, lastRev1); NodeDocument.setLastRev(op, lastRev2); UpdateUtils.applyChanges(doc, op); LogCustomizer customizer = LogCustomizer.forLogger(Utils.class).enable(Level.WARN).create(); customizer.starting(); try { // must wait until after lastRev2 timestamp Utils.alignWithExternalRevisions(doc, c, 1, TIME_DIFF_WARN_THRESHOLD_MILLIS); assertThat(c.getTime(), is(greaterThan(lastRev2.getTimestamp()))); assertFalse(customizer.getLogs().isEmpty()); assertThat(customizer.getLogs().iterator().next(), containsString("Detected clock differences")); } finally { customizer.finished(); } } @Test public void noWarnOnMinorClockDifferences() throws Exception { Clock c = new Clock.Virtual(); c.waitUntil(System.currentTimeMillis()); // local Revision lastRev1 = new Revision(c.getTime(), 0, 1); // other slightly in the future Revision lastRev2 = new Revision(c.getTime() + 100, 0, 2); // create a root document NodeDocument doc = new NodeDocument(new MemoryDocumentStore(), c.getTime()); UpdateOp op = new UpdateOp(Utils.getIdFromPath("/"), true); 
NodeDocument.setLastRev(op, lastRev1); NodeDocument.setLastRev(op, lastRev2); UpdateUtils.applyChanges(doc, op); LogCustomizer customizer = LogCustomizer.forLogger(Utils.class).enable(Level.WARN).create(); customizer.starting(); try { // must wait until after lastRev2 timestamp Utils.alignWithExternalRevisions(doc, c, 1, TIME_DIFF_WARN_THRESHOLD_MILLIS); assertThat(c.getTime(), is(greaterThan(lastRev2.getTimestamp()))); assertThat(customizer.getLogs(), empty()); } finally { customizer.finished(); } } @Test public void noWarnWithSingleClusterId() throws Exception { Clock c = new Clock.Virtual(); c.waitUntil(System.currentTimeMillis()); // local Revision lastRev1 = new Revision(c.getTime(), 0, 1); // create a root document NodeDocument doc = new NodeDocument(new MemoryDocumentStore(), c.getTime()); UpdateOp op = new UpdateOp(Utils.getIdFromPath("/"), true); NodeDocument.setLastRev(op, lastRev1); UpdateUtils.applyChanges(doc, op); LogCustomizer customizer = LogCustomizer.forLogger(Utils.class).enable(Level.WARN).create(); customizer.starting(); try { Utils.alignWithExternalRevisions(doc, c, 1, TIME_DIFF_WARN_THRESHOLD_MILLIS); assertThat(customizer.getLogs(), empty()); } finally { customizer.finished(); } } @Test public void isIdFromLongPath() { Path path = Path.fromString("/test"); while (!Utils.isLongPath(path)) { path = new Path(path, path.getName()); } String idFromLongPath = Utils.getIdFromPath(path); assertTrue(Utils.isIdFromLongPath(idFromLongPath)); assertFalse(Utils.isIdFromLongPath("foo")); assertFalse(Utils.isIdFromLongPath(NodeDocument.MIN_ID_VALUE)); assertFalse(Utils.isIdFromLongPath(NodeDocument.MAX_ID_VALUE)); assertFalse(Utils.isIdFromLongPath(":")); } @Test public void idDepth() { assertEquals(0, Utils.getIdDepth(Path.ROOT)); assertEquals(0, Utils.getIdDepth(Path.fromString("a"))); assertEquals(1, Utils.getIdDepth(Path.fromString("/a"))); assertEquals(2, Utils.getIdDepth(Path.fromString("/a/b"))); assertEquals(3, 
Utils.getIdDepth(Path.fromString("/a/b/c"))); assertEquals(2, Utils.getIdDepth(Path.fromString("a/b/c"))); } @Test public void encodeHexString() { Random r = new Random(42); for (int i = 0; i < 1000; i++) { int len = r.nextInt(100); byte[] data = new byte[len]; r.nextBytes(data); // compare against commons codec implementation assertEquals(Hex.encodeHexString(data), Utils.encodeHexString(data, new StringBuilder()).toString()); } } @Test public void isLocalChange() { RevisionVector empty = new RevisionVector(); Revision r11 = Revision.fromString("r1-0-1"); Revision r21 = Revision.fromString("r2-0-1"); Revision r12 = Revision.fromString("r1-0-2"); Revision r22 = Revision.fromString("r2-0-2"); assertFalse(Utils.isLocalChange(empty, empty, 1)); assertTrue(Utils.isLocalChange(empty, new RevisionVector(r11), 1)); assertFalse(Utils.isLocalChange(empty, new RevisionVector(r11), 0)); assertFalse(Utils.isLocalChange(new RevisionVector(r11), new RevisionVector(r11), 1)); assertTrue(Utils.isLocalChange(new RevisionVector(r11), new RevisionVector(r21), 1)); assertFalse(Utils.isLocalChange(new RevisionVector(r11), new RevisionVector(r11, r12), 1)); assertFalse(Utils.isLocalChange(new RevisionVector(r11, r12), new RevisionVector(r11, r12), 1)); assertFalse(Utils.isLocalChange(new RevisionVector(r11, r12), new RevisionVector(r11, r22), 1)); assertFalse(Utils.isLocalChange(new RevisionVector(r11, r12), new RevisionVector(r21, r22), 1)); assertTrue(Utils.isLocalChange(new RevisionVector(r11, r12), new RevisionVector(r21, r12), 1)); } @Test public void abortingIterableIsCloseable() throws Exception { AtomicBoolean closed = new AtomicBoolean(false); Iterable<String> iterable = CloseableIterable.wrap( Collections.emptyList(), () -> closed.set(true)); Utils.closeIfCloseable(Utils.abortingIterable(iterable, s -> true)); assertTrue(closed.get()); } @Test public void checkRevisionAge() throws Exception { DocumentStore store = new MemoryDocumentStore(); ClusterNodeInfo info = 
mock(ClusterNodeInfo.class); when(info.getId()).thenReturn(2); Clock clock = new Clock.Virtual(); clock.waitUntil(System.currentTimeMillis()); // store is empty -> fine Utils.checkRevisionAge(store, info, clock); UpdateOp op = new UpdateOp(Utils.getIdFromPath("/"), true); NodeDocument.setLastRev(op, new Revision(clock.getTime(), 0, 1)); assertTrue(store.create(Collection.NODES, Collections.singletonList(op))); // root document does not have a lastRev entry for clusterId 2 Utils.checkRevisionAge(store, info, clock); long lastRevTime = clock.getTime(); op = new UpdateOp(Utils.getIdFromPath("/"), false); NodeDocument.setLastRev(op, new Revision(lastRevTime, 0, 2)); assertNotNull(store.findAndUpdate(Collection.NODES, op)); // lastRev entry for clusterId 2 is older than current time Utils.checkRevisionAge(store, info, clock); // rewind time clock = new Clock.Virtual(); clock.waitUntil(lastRevTime - 1000); try { // now the check must fail Utils.checkRevisionAge(store, info, clock); fail("must fail with DocumentStoreException"); } catch (DocumentStoreException e) { assertThat(e.getMessage(), containsString("newer than current time")); } } @Test public void getStartRevisionsEmpty() { RevisionVector rv = Utils.getStartRevisions(Collections.emptyList()); assertEquals(0, rv.getDimensions()); } @Test public void getStartRevisionsSingleNode() { int clusterId = 1; long now = System.currentTimeMillis(); ClusterNodeInfoDocument info = mockedClusterNodeInfo(clusterId, now); RevisionVector rv = Utils.getStartRevisions(Collections.singleton(info)); assertEquals(1, rv.getDimensions()); Revision r = rv.getRevision(clusterId); assertNotNull(r); assertEquals(now, r.getTimestamp()); } @Test public void getStartRevisionsMultipleNodes() { int clusterId1 = 1; int clusterId2 = 2; long startTime1 = System.currentTimeMillis(); long startTime2 = startTime1 + 1000; ClusterNodeInfoDocument info1 = mockedClusterNodeInfo(clusterId1, startTime1); ClusterNodeInfoDocument info2 = 
mockedClusterNodeInfo(clusterId2, startTime2); RevisionVector rv = Utils.getStartRevisions(Arrays.asList(info1, info2)); assertEquals(2, rv.getDimensions()); Revision r1 = rv.getRevision(clusterId1); assertNotNull(r1); Revision r2 = rv.getRevision(clusterId2); assertNotNull(r2); assertEquals(startTime1, r1.getTimestamp()); assertEquals(startTime2, r2.getTimestamp()); } @Test public void sum() { assertEquals(0, Utils.sum()); assertEquals(42, Utils.sum(7, 15, 20)); assertEquals(-12, Utils.sum(-7, 15, -20)); assertEquals(42, Utils.sum(Long.MAX_VALUE, 43, Long.MIN_VALUE)); assertEquals(Long.MAX_VALUE, Utils.sum(Long.MAX_VALUE)); assertEquals(Long.MAX_VALUE - 1, Utils.sum(Long.MAX_VALUE, -1)); assertEquals(Long.MAX_VALUE, Utils.sum(Long.MAX_VALUE, 1)); assertEquals(Long.MIN_VALUE, Utils.sum(Long.MIN_VALUE)); assertEquals(Long.MIN_VALUE + 1, Utils.sum(Long.MIN_VALUE, 1)); assertEquals(Long.MIN_VALUE, Utils.sum(Long.MIN_VALUE, -1)); } private static ClusterNodeInfoDocument mockedClusterNodeInfo(int clusterId, long startTime) { ClusterNodeInfoDocument info = Mockito.mock(ClusterNodeInfoDocument.class); Mockito.when(info.getClusterId()).thenReturn(clusterId); Mockito.when(info.getStartTime()).thenReturn(startTime); return info; } }
googleapis/google-cloud-java
37,877
java-datacatalog/proto-google-cloud-datacatalog-v1beta1/src/main/java/com/google/cloud/datacatalog/v1beta1/CreateTagTemplateRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1beta1/datacatalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1beta1; /** * * * <pre> * Request message for * [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest} */ public final class CreateTagTemplateRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) CreateTagTemplateRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateTagTemplateRequest.newBuilder() to construct. 
private CreateTagTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateTagTemplateRequest() { parent_ = ""; tagTemplateId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateTagTemplateRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateTagTemplateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateTagTemplateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.class, com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. * * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. * * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TAG_TEMPLATE_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object tagTemplateId_ = ""; /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The tagTemplateId. */ @java.lang.Override public java.lang.String getTagTemplateId() { java.lang.Object ref = tagTemplateId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tagTemplateId_ = s; return s; } } /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for tagTemplateId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTagTemplateIdBytes() { java.lang.Object ref = tagTemplateId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tagTemplateId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TAG_TEMPLATE_FIELD_NUMBER = 2; private com.google.cloud.datacatalog.v1beta1.TagTemplate tagTemplate_; /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the tagTemplate field is set. */ @java.lang.Override public boolean hasTagTemplate() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The tagTemplate. */ @java.lang.Override public com.google.cloud.datacatalog.v1beta1.TagTemplate getTagTemplate() { return tagTemplate_ == null ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance() : tagTemplate_; } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder getTagTemplateOrBuilder() { return tagTemplate_ == null ? 
com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance() : tagTemplate_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getTagTemplate()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tagTemplateId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tagTemplateId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTagTemplate()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(tagTemplateId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tagTemplateId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest)) { return super.equals(obj); } com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest other = (com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) obj; if (!getParent().equals(other.getParent())) return false; if 
(!getTagTemplateId().equals(other.getTagTemplateId())) return false; if (hasTagTemplate() != other.hasTagTemplate()) return false; if (hasTagTemplate()) { if (!getTagTemplate().equals(other.getTagTemplate())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + TAG_TEMPLATE_ID_FIELD_NUMBER; hash = (53 * hash) + getTagTemplateId().hashCode(); if (hasTagTemplate()) { hash = (37 * hash) + TAG_TEMPLATE_FIELD_NUMBER; hash = (53 * hash) + getTagTemplate().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateTagTemplateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateTagTemplateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.class, com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.Builder.class); } // Construct using 
com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getTagTemplateFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; tagTemplateId_ = ""; tagTemplate_ = null; if (tagTemplateBuilder_ != null) { tagTemplateBuilder_.dispose(); tagTemplateBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_CreateTagTemplateRequest_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest build() { com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest buildPartial() { com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest result = new com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.tagTemplateId_ = 
tagTemplateId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.tagTemplate_ = tagTemplateBuilder_ == null ? tagTemplate_ : tagTemplateBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) { return mergeFrom((com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest other) { if (other == com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getTagTemplateId().isEmpty()) { tagTemplateId_ = other.tagTemplateId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasTagTemplate()) { mergeTagTemplate(other.getTagTemplate()); } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getTagTemplateFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 18 case 26: { tagTemplateId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. * * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. 
The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. * * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. * * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. * * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the project and the template location * [region](https://cloud.google.com/data-catalog/docs/concepts/regions. 
* * Example: * * * projects/{project_id}/locations/us-central1 * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object tagTemplateId_ = ""; /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The tagTemplateId. */ public java.lang.String getTagTemplateId() { java.lang.Object ref = tagTemplateId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); tagTemplateId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for tagTemplateId. */ public com.google.protobuf.ByteString getTagTemplateIdBytes() { java.lang.Object ref = tagTemplateId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); tagTemplateId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The tagTemplateId to set. * @return This builder for chaining. 
*/ public Builder setTagTemplateId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } tagTemplateId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearTagTemplateId() { tagTemplateId_ = getDefaultInstance().getTagTemplateId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The id of the tag template to create. * </pre> * * <code>string tag_template_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for tagTemplateId to set. * @return This builder for chaining. */ public Builder setTagTemplateIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); tagTemplateId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.datacatalog.v1beta1.TagTemplate tagTemplate_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.TagTemplate, com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder, com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder> tagTemplateBuilder_; /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the tagTemplate field is set. */ public boolean hasTagTemplate() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The tagTemplate. 
*/ public com.google.cloud.datacatalog.v1beta1.TagTemplate getTagTemplate() { if (tagTemplateBuilder_ == null) { return tagTemplate_ == null ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance() : tagTemplate_; } else { return tagTemplateBuilder_.getMessage(); } } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTagTemplate(com.google.cloud.datacatalog.v1beta1.TagTemplate value) { if (tagTemplateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } tagTemplate_ = value; } else { tagTemplateBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTagTemplate( com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder builderForValue) { if (tagTemplateBuilder_ == null) { tagTemplate_ = builderForValue.build(); } else { tagTemplateBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeTagTemplate(com.google.cloud.datacatalog.v1beta1.TagTemplate value) { if (tagTemplateBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && tagTemplate_ != null && tagTemplate_ != com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance()) { getTagTemplateBuilder().mergeFrom(value); } else { tagTemplate_ = value; } } else { tagTemplateBuilder_.mergeFrom(value); } if (tagTemplate_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. 
The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearTagTemplate() { bitField0_ = (bitField0_ & ~0x00000004); tagTemplate_ = null; if (tagTemplateBuilder_ != null) { tagTemplateBuilder_.dispose(); tagTemplateBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder getTagTemplateBuilder() { bitField0_ |= 0x00000004; onChanged(); return getTagTemplateFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The tag template to create. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder getTagTemplateOrBuilder() { if (tagTemplateBuilder_ != null) { return tagTemplateBuilder_.getMessageOrBuilder(); } else { return tagTemplate_ == null ? com.google.cloud.datacatalog.v1beta1.TagTemplate.getDefaultInstance() : tagTemplate_; } } /** * * * <pre> * Required. The tag template to create. 
* </pre> * * <code> * .google.cloud.datacatalog.v1beta1.TagTemplate tag_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.TagTemplate, com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder, com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder> getTagTemplateFieldBuilder() { if (tagTemplateBuilder_ == null) { tagTemplateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.TagTemplate, com.google.cloud.datacatalog.v1beta1.TagTemplate.Builder, com.google.cloud.datacatalog.v1beta1.TagTemplateOrBuilder>( getTagTemplate(), getParentForChildren(), isClean()); tagTemplate_ = null; } return tagTemplateBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest) private static final com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest(); } public static com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateTagTemplateRequest> PARSER = new com.google.protobuf.AbstractParser<CreateTagTemplateRequest>() { @java.lang.Override public CreateTagTemplateRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateTagTemplateRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateTagTemplateRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/gobblin
37,217
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/troubleshooter/IssueTestDataProvider.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gobblin.runtime.troubleshooter;

import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import lombok.extern.slf4j.Slf4j;

import org.apache.gobblin.configuration.ErrorCategory;
import org.apache.gobblin.configuration.ErrorPatternProfile;

import static java.util.Collections.singletonMap;


/**
 * Supplies canned {@link Issue} instances, {@link ErrorCategory} definitions and
 * {@link ErrorPatternProfile} regex profiles for exercising the troubleshooter's
 * error-classification logic in tests.
 *
 * <p>The shared constants ({@link #TEST_CATEGORIES}, {@link #TEST_PATTERNS}) are
 * unmodifiable; every {@code test*} factory method builds a fresh mutable list on
 * each call, so callers may freely mutate the returned lists.
 */
@Slf4j
public class IssueTestDataProvider {

  /** Error categories used by the test patterns, ordered by priority (1 = highest). Unmodifiable. */
  public static final List<ErrorCategory> TEST_CATEGORIES = Collections.unmodifiableList(Arrays.asList(
      new ErrorCategory("USER", 1),
      new ErrorCategory("SYSTEM INFRA", 2),
      new ErrorCategory("GAAS", 3),
      new ErrorCategory("MISC", 4),
      new ErrorCategory("UNKNOWN", 5),
      new ErrorCategory("NON FATAL", 6)
  ));

  /** Fallback category for issues that match no pattern. */
  public static final ErrorCategory TEST_DEFAULT_ERROR_CATEGORY = new ErrorCategory("UNKNOWN", 5);

  /**
   * Regex profiles mapping lower-cased error text to a category name.
   * Mirrors a production classification config; patterns are matched against
   * issue summaries by the classifier under test. Unmodifiable.
   */
  public static final List<ErrorPatternProfile> TEST_PATTERNS = Collections.unmodifiableList(Arrays.asList(
      new ErrorPatternProfile(".*the namespace quota.*", "USER"),
      new ErrorPatternProfile(".*permission denied.*", "USER"),
      new ErrorPatternProfile(".*remoteexception.*read only mount point.*", "USER"),
      new ErrorPatternProfile(".*remoteexception: operation category write is not supported in state observer.*", "USER"),
      new ErrorPatternProfile(".*invalidoperationexception: alter is not possible.*", "USER"),
      new ErrorPatternProfile(".*the diskspace quota.*", "USER"),
      new ErrorPatternProfile(".*filenotfoundexception:.*", "USER"),
      new ErrorPatternProfile(".*does not exist.*", "USER"),
      new ErrorPatternProfile("remoteexception: file/directory.*does not exist", "USER"),
      new ErrorPatternProfile("runtimeexception: directory not found.*", "USER"),
      new ErrorPatternProfile(".*icebergtable.tablenotfoundexception.*", "USER"),
      new ErrorPatternProfile("ioexception: topath.*must be an ancestor of frompath.*", "USER"),
      new ErrorPatternProfile("ioexception: origin path.*does not exist.*", "USER"),
      new ErrorPatternProfile("illegalargumentexception: missing source iceberg table.*", "USER"),
      new ErrorPatternProfile("remoteexception:.*already exists \\| failed to send re-scaling directive.*", "USER"),
      new ErrorPatternProfile("illegalargumentexception: when replacing prefix, all locations must be descendants of the prefix.*", "GAAS"),
      new ErrorPatternProfile("illegalargumentexception:.*", "MISC"),
      new ErrorPatternProfile(".*remoteexception: server too busy.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("jschexception: session is down.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("jschexception: channel request.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*safemodeexception.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("remoteexception: no namenode available to invoke.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("remoteexception: org.apache.hadoop.hdfs.server.federation.router.nonamenodesavailableexception: no namenodes available under nameservice.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*no namenodes available under nameservice.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*sockettimeoutexception.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*socketexception: connection reset.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("timeoutexception: failed to update metadata after 60000 ms.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("networkexception: the server disconnected before a response was received.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("webclientrequestwithmessageexception: handshake timed out after 10000ms.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("timeoutfailure: message='activity heartbeat timeout.*", "GAAS"),
      new ErrorPatternProfile("timeoutexception: failed to allocate memory within the configured max blocking time.*", "GAAS"),
      new ErrorPatternProfile("timeoutfailure: message='activity starttoclose timeout'.*", "GAAS"),
      new ErrorPatternProfile("timeoutexception: topic gobblintrackingevent_sgs not present in metadata after 60000 ms.*", "GAAS"),
      new ErrorPatternProfile("connectexception: connection refused.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("connectionisclosedexception: no operations allowed after connection closed.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("ioexception: failed to get connection for.*is already stopped.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("ioexception: unable to close file because dfsclient was unable to contact the hdfs servers.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*hikaripool-.*-datasourceprovider - failed to execute connection test query.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*interruptedioexception.*", "MISC"),
      new ErrorPatternProfile("interruptedexception: sleep interrupted.*", "MISC"),
      new ErrorPatternProfile("closedbyinterruptexception.*", "MISC"),
      new ErrorPatternProfile("interruptedexception: \\| failed to commit writer for partition.*", "MISC"),
      new ErrorPatternProfile(".*http connection failed for getting token from azuread.*", "USER"),
      new ErrorPatternProfile("ioexception: invalid token.*", "USER"),
      new ErrorPatternProfile(".*remoteexception: token.*can't be found in cache.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*applicationfailure:.*token.*can't be found in cache.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*closedchannelexception.*", "MISC"),
      new ErrorPatternProfile("applicationfailure: message='task failed: org.apache.gobblin.runtime.forkexception.*", "MISC"),
      new ErrorPatternProfile(".*eofexception.*", "MISC"),
      new ErrorPatternProfile("canceledfailure: child canceled.*", "MISC"),
      new ErrorPatternProfile(".*message='unable to create new native thread'.*", "GAAS"),
      new ErrorPatternProfile("ioexception: failed to register hive spec simplehivespec.*", "USER"),
      new ErrorPatternProfile("ioexception: error: likely concurrent writing to destination: \\(just prior\\) tablemetadata.*", "MISC"),
      new ErrorPatternProfile(".*nullpointerexception:.*", "MISC"),
      new ErrorPatternProfile(".*classnotfoundexception.*", "MISC"),
      new ErrorPatternProfile(".*noclassdeffounderror: could not initialize class.*", "MISC"),
      new ErrorPatternProfile(".*failure in getting work units for job.*", "MISC"),
      new ErrorPatternProfile(".*operation failed: \"the uploaded data is not contiguous or the position query parameter value is not equal to the length of the file after appending the uploaded data.*", "MISC"),
      new ErrorPatternProfile(".*applicationfailure: message='task failed: operation failed: \"the uploaded data is not contiguous or the position query parameter value is not equal to the length of the file after appending the uploaded data.*", "MISC"),
      new ErrorPatternProfile(".*could not create work unit to deregister partition.*", "MISC"),
      new ErrorPatternProfile("runtimeexception: not committing dataset.*", "MISC"),
      new ErrorPatternProfile("job.*has unfinished commit sequences. will not clean up staging data", "MISC"),
      new ErrorPatternProfile(".*failed to commit.*dataset.*", "MISC"),
      new ErrorPatternProfile(".*could not commit file.*", "MISC"),
      new ErrorPatternProfile("runtimeexception: failed to parse the date.*", "USER"),
      new ErrorPatternProfile("runtimeexception: clean failed for one or more datasets.*", "MISC"),
      new ErrorPatternProfile("runtimeexception: not copying.*", "MISC"),
      new ErrorPatternProfile(".*failed to read from all available datanodes.*", "NON FATAL"),
      new ErrorPatternProfile(".*failed to prepare extractor: error - failed to get schema for this object.*", "NON FATAL"),
      new ErrorPatternProfile(".*failed to get primary group for kafkaetl, using user name as primary group name.*", "NON FATAL"),
      new ErrorPatternProfile(".*failed to delete.*", "NON FATAL"),
      new ErrorPatternProfile(".*applicationfailure: message='task failed: java.lang.arrayindexoutofboundsexception.*", "NON FATAL"),
      new ErrorPatternProfile(".*arrayindexoutofboundsexception.*failed to close all open resources", "NON FATAL"),
      new ErrorPatternProfile("ioexception: failed to clean up all writers. \\| failed to close all open resources.*", "MISC"),
      new ErrorPatternProfile("applicationfailure:.*failed to clean up all writers.*", "MISC"),
      new ErrorPatternProfile("applicationfailure: message='task failed: java.io.ioexception: failed to commit all writers..*", "MISC"),
      new ErrorPatternProfile(".*metaexception.*", "USER"),
      new ErrorPatternProfile(".*ioexception: filesystem closed.*", "GAAS"),
      new ErrorPatternProfile(".*source and target table are not compatible.*", "USER"),
      new ErrorPatternProfile("hivetablelocationnotmatchexception: desired target location.*do not agree.*", "USER"),
      new ErrorPatternProfile(".*schema mismatch between metadata.*", "USER"),
      new ErrorPatternProfile(".*could not read all task state files.*", "MISC"),
      new ErrorPatternProfile("taskstatestore successfully opened, but no task states found under.*", "MISC"),
      new ErrorPatternProfile(".*setting task state to failed.*", "MISC"),
      new ErrorPatternProfile("illegalstateexception: expected end_array.*", "GAAS"),
      new ErrorPatternProfile("illegalstateexception: cannot perform operation after producer has been closed.*", "GAAS"),
      new ErrorPatternProfile("webclientresponse.*503 service unavailable.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("webclientresponseexception.*504.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("webclientresponsewithmessageexception.*504.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("webclientresponseexception.*400.*", "USER"),
      new ErrorPatternProfile("webclientresponseexception.*409.*", "USER"),
      new ErrorPatternProfile("webclientresponsewithmessageexception.*402.*", "USER"),
      new ErrorPatternProfile(".*fsstatestore.*", "MISC"),
      new ErrorPatternProfile("sqltransientconnectionexception:.*connection is not available, request timed out after.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("sqlnontransientconnectionexception: got timeout reading communication packets.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("sqlexception: unsupported transaction isolation level '-1'.*", "SYSTEM INFRA"),
      new ErrorPatternProfile("sqlexception: incorrect string value:.*for.*", "USER"),
      new ErrorPatternProfile(".*obstateutils.opentaskstatestoreuncached debug.*", "MISC"),
      // NOTE(review): the leading "[bug]" below is an unescaped regex character class (matches a single
      // 'b'/'u'/'g'), not the literal text "[bug]". Kept byte-identical because it mirrors the production
      // pattern this test data reproduces — confirm upstream whether "\\[bug\\]" was intended.
      new ErrorPatternProfile("[bug] workflow thread.*can't be destroyed in time. this will lead to a workflow cache leak. this problem is usually caused by a workflow implementation swallowing java.lang.error instead of rethrowing it.*", "SYSTEM INFRA"),
      new ErrorPatternProfile(".*outofmemoryerror.*", "GAAS"),
      new ErrorPatternProfile("filealreadyexistsexception: rename destination.*", "NON FATAL"),
      new ErrorPatternProfile("applicationfailure: message='failed to rename.*", "MISC"),
      new ErrorPatternProfile("potentialdeadlockexception: potential deadlock detected.*", "GAAS"),
      new ErrorPatternProfile("workflow lock for the run id hasn't been released by one of previous execution attempts, consider increasing workflow task timeout.*", "GAAS"),
      new ErrorPatternProfile("nosuchelementexception: \\| failed to commit writer for partition.*", "MISC"),
      new ErrorPatternProfile("illegalargumentexception: cannot instantiate jdbcpublisher since it does not extend singletaskdatapublisher.*", "MISC"),
      new ErrorPatternProfile("processing failure:.*retrystate=retry_state.*commit workflow failure", "MISC"),
      new ErrorPatternProfile(".*recordtoolargeexception.*", "MISC"),
      new ErrorPatternProfile("ioexception: could not get block locations. source file", "MISC"),
      new ErrorPatternProfile("ioexception: job status not available \\| failed to launch and run job.*", "MISC"),
      new ErrorPatternProfile("enqueuing of event.*timed out. sending of events is probably stuck", "MISC"),
      new ErrorPatternProfile("applicationfailure: message='task failed: status code: -1 error code.*", "MISC"),
      new ErrorPatternProfile("applicationfailure: message='failing in submitting at least one task before execution.'.*", "MISC"),
      new ErrorPatternProfile(".*watermark type simple not recognized.*", "MISC")
  ));

  /** Category name -> priority, computed once from {@link #TEST_CATEGORIES}. Unmodifiable. */
  private static final Map<String, Integer> CATEGORY_PRIORITY = buildCategoryPriorityMap();

  /** Builds the immutable category-name-to-priority lookup used for sorting patterns. */
  private static Map<String, Integer> buildCategoryPriorityMap() {
    Map<String, Integer> priorities = new HashMap<>();
    for (ErrorCategory errorCategory : TEST_CATEGORIES) {
      priorities.put(errorCategory.getCategoryName(), errorCategory.getPriority());
    }
    return Collections.unmodifiableMap(priorities);
  }

  /**
   * Returns a fresh, mutable copy of {@link #TEST_PATTERNS} sorted by category priority
   * (ascending; unknown categories sort last). The sort is stable, so patterns within a
   * category keep their declaration order.
   */
  public static List<ErrorPatternProfile> getSortedPatterns() {
    List<ErrorPatternProfile> sortedPatterns = new ArrayList<>(TEST_PATTERNS);
    sortedPatterns.sort(
        Comparator.comparingInt(e -> CATEGORY_PRIORITY.getOrDefault(e.getCategoryName(), Integer.MAX_VALUE)));
    return sortedPatterns;
  }

  /** Returns issues whose summaries should each match a USER-category regex pattern. */
  public static List<Issue> testUserCategoryIssues() {
    log.info("Generating test data for USER category issues");
    List<Issue> issues = new ArrayList<>();

    // Namespace quota
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "ABCD123",
        "Job failed due to the namespace quota being exceeded for user data", "ContextA", "namespaceQuotaError",
        "quotaexception", singletonMap("keyA", "valueA")));

    // Permissions
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.ERROR, "ABCD123",
        "Task failed with permission denied when accessing secure directory", "ContextB", "permissionError",
        "securityexception", singletonMap("keyB", "valueB")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "ABCD123",
        "RemoteException: operation category write is not supported in state observer mode", "ContextC",
        "writePermissionError", "remoteexception", singletonMap("keyC", "valueC")));

    // Diskspace quota
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "ABCD123",
        "Operation failed because the diskspace quota has been reached", "ContextD", "diskQuotaError",
        "quotaexception", singletonMap("keyD", "valueD")));

    // Missing files/directories
    issues.add(new Issue(ZonedDateTime.now().minusHours(4), IssueSeverity.ERROR, "ABCD123",
        "FileNotFoundException: /path/to/missing/file.txt not found", "ContextE", "fileNotFoundError",
        "filenotfoundexception", singletonMap("keyE", "valueE")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(5), IssueSeverity.ERROR, "ABCD123",
        "RemoteException: file/directory /data/temp does not exist", "ContextF", "directoryNotFoundError",
        "remoteexception", singletonMap("keyF", "valueF")));

    // Token acquisition
    issues.add(new Issue(ZonedDateTime.now().minusHours(6), IssueSeverity.ERROR, "ABCD123",
        "HTTP connection failed for getting token from AzureAD authentication service", "ContextG", "tokenError",
        "authexception", singletonMap("keyG", "valueG")));
    return issues;
  }

  /** Returns issues whose summaries should each match a SYSTEM INFRA regex pattern. */
  public static List<Issue> testSystemInfraCategoryIssues() {
    log.info("Generating test data for SYSTEM_INFRA category issues");
    List<Issue> issues = new ArrayList<>();

    // Busy server / dead session
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "RemoteException: server too busy to handle request at this time", "ContextA", "serverBusyError",
        "remoteexception", singletonMap("keyA", "valueA")));
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "JSchException: session is down and cannot be restored", "ContextB", "sessionDownError", "jschexception",
        singletonMap("keyB", "valueB")));

    // Namenode availability
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "SafeModeException: Name node is in safe mode and cannot accept changes", "ContextC", "safeModeError",
        "safemodeexception", singletonMap("keyC", "valueC")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "RemoteException: no namenode available to invoke for cluster operations", "ContextD", "namenodeError",
        "remoteexception", singletonMap("keyD", "valueD")));

    // Timeouts / resets
    issues.add(new Issue(ZonedDateTime.now().minusHours(4), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "SocketTimeoutException: Read timeout after 30000ms waiting for response", "ContextE", "timeoutError",
        "sockettimeoutexception", singletonMap("keyE", "valueE")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(5), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "SocketException: connection reset by peer during data transfer", "ContextF", "connectionResetError",
        "socketexception", singletonMap("keyF", "valueF")));

    // Refused connections
    issues.add(new Issue(ZonedDateTime.now().minusHours(6), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "ConnectException: connection refused when trying to connect to remote service", "ContextG",
        "connectionRefusedError", "connectexception", singletonMap("keyG", "valueG")));
    return issues;
  }

  /** Returns issues whose summaries should each match a GAAS-category regex pattern. */
  public static List<Issue> testGaasCategoryIssues() {
    log.info("Generating test data for GAAS category issues");
    List<Issue> issues = new ArrayList<>();

    // Timeouts
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "GAAS",
        "TimeoutFailure: message='activity heartbeat timeout exceeded configured limit'", "ContextA",
        "activityTimeoutError", "timeoutfailure", singletonMap("keyA", "valueA")));
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.ERROR, "GAAS",
        "TimeoutException: failed to allocate memory within the configured max blocking time of 5000ms", "ContextB",
        "memoryTimeoutError", "timeoutexception", singletonMap("keyB", "valueB")));

    // Thread exhaustion
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "GAAS",
        "ApplicationFailure: message='unable to create new native thread - system limits reached'", "ContextC",
        "threadCreationError", "applicationfailure", singletonMap("keyC", "valueC")));

    // Filesystem
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "GAAS",
        "IOException: filesystem closed unexpectedly during operation", "ContextD", "filesystemClosedError",
        "ioexception", singletonMap("keyD", "valueD")));

    // Illegal state
    issues.add(new Issue(ZonedDateTime.now().minusHours(4), IssueSeverity.ERROR, "GAAS",
        "IllegalStateException: expected END_ARRAY but found different token type", "ContextE", "illegalStateError",
        "illegalstateexception", singletonMap("keyE", "valueE")));
    return issues;
  }

  /** Returns issues whose summaries should each match a MISC-category regex pattern. */
  public static List<Issue> testMiscCategoryIssues() {
    log.info("Generating test data for MISC category issues");
    List<Issue> issues = new ArrayList<>();

    // Interruptions
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.WARN, "MISC",
        "InterruptedIOException occurred during file operation", "ContextA", "interruptedIOError",
        "interruptedioexception", singletonMap("keyA", "valueA")));
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.WARN, "MISC",
        "InterruptedException: sleep interrupted by external signal", "ContextB", "sleepInterruptedError",
        "interruptedexception", singletonMap("keyB", "valueB")));

    // NPE
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "MISC",
        "NullPointerException: attempt to invoke method on null object reference", "ContextC", "nullPointerError",
        "nullpointerexception", singletonMap("keyC", "valueC")));

    // Missing class
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "MISC",
        "ClassNotFoundException: com.example.MissingClass not found in classpath", "ContextD", "classNotFoundError",
        "classnotfoundexception", singletonMap("keyD", "valueD")));

    // Concurrent write
    issues.add(new Issue(ZonedDateTime.now().minusHours(4), IssueSeverity.ERROR, "MISC",
        "IOException: error: likely concurrent writing to destination: (just prior) TableMetadata collision detected",
        "ContextE", "concurrentWriteError", "ioexception", singletonMap("keyE", "valueE")));
    return issues;
  }

  /** Returns issues whose summaries should each match a NON FATAL regex pattern. */
  public static List<Issue> testNonFatalCategoryIssues() {
    log.info("Generating test data for NON FATAL category issues");
    List<Issue> issues = new ArrayList<>();
    // Trailing space in the summary below is intentional test data — kept verbatim.
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "NON FATAL",
        "filealreadyexistsexception: rename destination ", "ContextA", "copyDataWarning", "copydatapublisher",
        singletonMap("keyA", "valueA")));
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.ERROR, "NON FATAL",
        "Warning: failed to read from all available datanodes, retrying with backup nodes", "ContextB",
        "datanodeReadWarning", "ioexception", singletonMap("keyB", "valueB")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "NON FATAL",
        "Info: failed to delete temporary file - cleanup will retry later", "ContextC", "deleteFailureInfo",
        "ioexception", singletonMap("keyC", "valueC")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "NON FATAL",
        "1234: arrayindexoutofboundsexception. Operation failed to close all open resources", "ContextD",
        "reporterException", "reporterexception", singletonMap("keyD", "valueD")));
    return issues;
  }

  /** Returns issues that should NOT match any configured regex pattern. */
  public static List<Issue> testNegativeMatchCases() {
    log.info("Generating test data for negative match cases");
    List<Issue> issues = new ArrayList<>();

    // Success message
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.INFO, "SUCCESS",
        "Job completed successfully without any errors", "ContextA", "successInfo", "success",
        singletonMap("keyA", "valueA")));
    // Custom application error outside the pattern set
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.ERROR, "CUSTOM",
        "CustomBusinessLogicException: validation failed for user input", "ContextB", "customError",
        "businessexception", singletonMap("keyB", "valueB")));
    // Generic warning
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.WARN, "GENERIC",
        "Process completed with warnings but no critical errors detected", "ContextC", "genericWarning", "warning",
        singletonMap("keyC", "valueC")));
    // Unrelated error
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "UNRELATED",
        "Database connection pool exhausted - increase pool size configuration", "ContextD", "poolError",
        "poolexception", singletonMap("keyD", "valueD")));
    return issues;
  }

  /** Returns issues probing boundary behavior: casing, partial matches, empty/null/long summaries. */
  public static List<Issue> testEdgeCasesAndBoundaryConditions() {
    log.info("Generating test data for edge cases and boundary conditions");
    List<Issue> issues = new ArrayList<>();

    // Case sensitivity
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "CASE_TEST",
        "FILENOTFOUNDEXCEPTION: uppercase version of file not found error", "ContextA", "uppercaseError",
        "filenotfoundexception", singletonMap("keyA", "valueA")));
    // Partial pattern match
    issues.add(new Issue(ZonedDateTime.now().minusDays(1), IssueSeverity.ERROR, "PARTIAL_TEST",
        "This is a permission error but not exactly permission denied", "ContextB", "partialPermissionError",
        "securityexception", singletonMap("keyB", "valueB")));
    // Multiple patterns in one summary
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "MULTI_MATCH",
        "FileNotFoundException: permission denied when accessing the namespace quota restricted file", "ContextC",
        "multiMatchError", "complexexception", singletonMap("keyC", "valueC")));
    // Empty and null summaries
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.ERROR, "EMPTY_TEST", "", "ContextD",
        "emptyDescriptionError", "unknownexception", singletonMap("keyD", "valueD")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(4), IssueSeverity.ERROR, "NULL_TEST", null, "ContextE",
        "nullDescriptionError", "unknownexception", singletonMap("keyE", "valueE")));
    // Verbose summary with an embedded match
    issues.add(new Issue(ZonedDateTime.now().minusHours(5), IssueSeverity.ERROR, "LONG_TEST",
        "This is a very long error description that contains the phrase 'permission denied' somewhere in the middle of a lot of other text that might make pattern matching more challenging to ensure our regex patterns work correctly even with verbose error messages",
        "ContextF", "longDescriptionError", "verboseexception", singletonMap("keyF", "valueF")));
    return issues;
  }

  /** Returns a mix of NON FATAL issues and issues that match no pattern (UNKNOWN). */
  public static List<Issue> testNonFatalAndUnknownMix() {
    List<Issue> issues = new ArrayList<>();

    // NON FATAL
    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "NON FATAL",
        "applicationfailure: message='task failed: java.lang.arrayindexoutofboundsexceptionues", "ContextA",
        "copyDataWarning", "copydatapublisher", singletonMap("keyA", "valueA")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(1), IssueSeverity.ERROR, "NON FATAL",
        "Info: failed to delete temporary file - cleanup will retry later", "ContextB", "deleteFailureInfo",
        "ioexception", singletonMap("keyB", "valueB")));

    // UNKNOWN (no pattern matches)
    issues.add(new Issue(ZonedDateTime.now().minusHours(2), IssueSeverity.ERROR, "UNKNOWN",
        "CustomApplicationException: unexpected error occurred in module", "ContextC", "customError",
        "customexception", singletonMap("keyC", "valueC")));
    issues.add(new Issue(ZonedDateTime.now().minusHours(3), IssueSeverity.WARN, "UNKNOWN",
        "Process completed with warnings but no critical errors detected", "ContextD", "genericWarning", "warning",
        singletonMap("keyD", "valueD")));
    return issues;
  }

  /**
   * Returns 70 issues (15 each of USER / SYSTEM INFRA / GAAS / MISC plus 10 NON FATAL)
   * to exercise overflow/limit handling when error volume exceeds the display cap.
   */
  public static List<Issue> testMaximumErrorCountExceeded() {
    List<Issue> issues = new ArrayList<>();

    // 15 USER errors (highest priority)
    for (int i = 0; i < 15; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(i), IssueSeverity.ERROR, "USER",
          "FileNotFoundException: /path/to/missing/file" + i + ".txt not found", "ContextU" + i,
          "fileNotFoundError" + i, "filenotfoundexception", singletonMap("keyU" + i, "valueU" + i)));
    }
    // 15 SYSTEM INFRA errors
    for (int i = 0; i < 15; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(15 + i), IssueSeverity.ERROR, "SYSTEM_INFRA",
          "RemoteException: server too busy to handle request " + i, "ContextS" + i, "serverBusyError" + i,
          "remoteexception", singletonMap("keyS" + i, "valueS" + i)));
    }
    // 15 GAAS errors
    for (int i = 0; i < 15; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(30 + i), IssueSeverity.ERROR, "GAAS",
          "TimeoutFailure: message='activity heartbeat timeout " + i + "'", "ContextG" + i, "timeoutError" + i,
          "timeoutfailure", singletonMap("keyG" + i, "valueG" + i)));
    }
    // 15 MISC errors
    for (int i = 0; i < 15; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(45 + i), IssueSeverity.ERROR, "MISC",
          "NullPointerException: null reference in operation " + i, "ContextM" + i, "nullPointerError" + i,
          "nullpointerexception", singletonMap("keyM" + i, "valueM" + i)));
    }
    // 10 NON FATAL warnings (lower priority)
    for (int i = 0; i < 10; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(60 + i), IssueSeverity.WARN, "NON FATAL",
          "Warning: failed to delete temporary file " + i + " - cleanup will retry later", "ContextN" + i,
          "deleteFailureWarning" + i, "ioexception", singletonMap("keyN" + i, "valueN" + i)));
    }
    return issues;
  }

  /**
   * Returns exactly one issue per category, for testing behavior when counts are
   * below the per-category threshold.
   */
  public static List<Issue> testMinimumErrorCountBelowThreshold() {
    List<Issue> issues = new ArrayList<>();

    issues.add(new Issue(ZonedDateTime.now(), IssueSeverity.ERROR, "USER",
        "FileNotFoundException: /single/missing/file.txt not found", "ContextU1", "singleFileNotFoundError",
        "filenotfoundexception", singletonMap("keyU1", "valueU1")));
    issues.add(new Issue(ZonedDateTime.now().minusMinutes(5), IssueSeverity.ERROR, "SYSTEM_INFRA",
        "RemoteException: server too busy - single occurrence", "ContextS1", "singleServerBusyError",
        "remoteexception", singletonMap("keyS1", "valueS1")));
    issues.add(new Issue(ZonedDateTime.now().minusMinutes(10), IssueSeverity.ERROR, "GAAS",
        "TimeoutFailure: message='single activity heartbeat timeout'", "ContextG1", "singleTimeoutError",
        "timeoutfailure", singletonMap("keyG1", "valueG1")));
    issues.add(new Issue(ZonedDateTime.now().minusMinutes(15), IssueSeverity.ERROR, "MISC",
        "NullPointerException: single null reference occurrence", "ContextM1", "singleNullPointerError",
        "nullpointerexception", singletonMap("keyM1", "valueM1")));
    issues.add(new Issue(ZonedDateTime.now().minusMinutes(20), IssueSeverity.ERROR, "NON FATAL",
        "Warning: single failed delete operation - cleanup will retry", "ContextN1", "singleDeleteFailureWarning",
        "ioexception", singletonMap("keyN1", "valueN1")));
    return issues;
  }

  /**
   * Returns 30 issues (5 per category) ordered from lowest-priority category (UNKNOWN)
   * to highest (USER), to exercise display/processing limits with mixed priorities.
   */
  public static List<Issue> testMixedCategoryIssuesFromLowestToHighestPriority() {
    List<Issue> issues = new ArrayList<>();

    // UNKNOWN (lowest priority) — summaries that match no regex pattern
    String[] unknownErrors = {"CustomBusinessLogicException: validation failed for user input",
        "DatabaseConnectionException: connection pool exhausted",
        "ConfigurationException: invalid property value detected",
        "SecurityException: authentication failed with external service",
        "NetworkException: unable to reach external API endpoint"};
    for (int i = 0; i < unknownErrors.length; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(60 + i), IssueSeverity.INFO, "UNKNOWN",
          unknownErrors[i], "ContextU" + i, "unknownError" + i, "unknownexception",
          singletonMap("keyU" + i, "valueU" + i)));
    }

    // NON FATAL
    String[] nonFatalErrors = {"CopyDataPublisher warning: srcfs operation completed with minor issues",
        "Warning: failed to read from all available datanodes, retrying with backup nodes",
        "Info: failed to delete temporary file - cleanup will retry later",
        "Exception thrown from InGraphsReporter#report. Exception was suppressed and processing continued",
        "ApplicationFailure: message='task failed: java.lang.ArrayIndexOutOfBoundsException at index 5'"};
    for (int i = 0; i < nonFatalErrors.length; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(50 + i), IssueSeverity.WARN, "NON FATAL",
          nonFatalErrors[i], "ContextN" + i, "nonFatalError" + i, "nonfatalexception",
          singletonMap("keyN" + i, "valueN" + i)));
    }

    // MISC
    String[] miscErrors = {"InterruptedIOException occurred during file operation",
        "NullPointerException: attempt to invoke method on null object reference",
        "ClassNotFoundException: com.example.MissingClass not found in classpath",
        "IOException: error: likely concurrent writing to destination: (just prior) TableMetadata collision detected",
        "ApplicationFailure: failed to clean up all writers during shutdown"};
    for (int i = 0; i < miscErrors.length; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(40 + i), IssueSeverity.ERROR, "MISC",
          miscErrors[i], "ContextM" + i, "miscError" + i, "miscexception",
          singletonMap("keyM" + i, "valueM" + i)));
    }

    // GAAS
    String[] gaasErrors = {"TimeoutFailure: message='activity heartbeat timeout exceeded configured limit'",
        "TimeoutException: failed to allocate memory within the configured max blocking time of 5000ms",
        "ApplicationFailure: message='unable to create new native thread - system limits reached'",
        "IOException: filesystem closed unexpectedly during operation",
        "IllegalStateException: expected END_ARRAY but found different token type"};
    for (int i = 0; i < gaasErrors.length; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(30 + i), IssueSeverity.ERROR, "GAAS",
          gaasErrors[i], "ContextG" + i, "gaasError" + i, "gaasexception",
          singletonMap("keyG" + i, "valueG" + i)));
    }

    // SYSTEM INFRA
    String[] systemInfraErrors = {"RemoteException: server too busy to handle request at this time",
        "JSchException: session is down and cannot be restored",
        "SafeModeException: Name node is in safe mode and cannot accept changes",
        "SocketTimeoutException: Read timeout after 30000ms waiting for response",
        "ConnectException: connection refused when trying to connect to remote service"};
    for (int i = 0; i < systemInfraErrors.length; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(20 + i), IssueSeverity.ERROR, "SYSTEM_INFRA",
          systemInfraErrors[i], "ContextS" + i, "systemInfraError" + i, "systeminfraexception",
          singletonMap("keyS" + i, "valueS" + i)));
    }

    // USER (highest priority)
    String[] userErrors = {"Job failed due to the namespace quota being exceeded for user data",
        "Task failed with permission denied when accessing secure directory",
        "Operation failed because the diskspace quota has been reached",
        "FileNotFoundException: /path/to/missing/file.txt not found",
        "HTTP connection failed for getting token from AzureAD authentication service"};
    for (int i = 0; i < userErrors.length; i++) {
      issues.add(new Issue(ZonedDateTime.now().minusMinutes(10 + i), IssueSeverity.ERROR, "USER",
          userErrors[i], "ContextUR" + i, "userError" + i, "userexception",
          singletonMap("keyUR" + i, "valueUR" + i)));
    }
    return issues;
  }
}
apache/incubator-gluten
38,175
gluten-flink/runtime/src/main/java/org/apache/flink/streaming/runtime/tasks/OperatorChain.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.runtime.tasks; import org.apache.gluten.streaming.runtime.tasks.GlutenOutputCollector; import org.apache.gluten.util.Utils; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.SimpleCounter; import org.apache.flink.metrics.groups.OperatorMetricGroup; import org.apache.flink.runtime.checkpoint.CheckpointException; import org.apache.flink.runtime.checkpoint.CheckpointMetaData; import org.apache.flink.runtime.checkpoint.CheckpointOptions; import org.apache.flink.runtime.checkpoint.StateObjectCollection; import org.apache.flink.runtime.checkpoint.channel.ChannelStateWriter; import org.apache.flink.runtime.event.AbstractEvent; import org.apache.flink.runtime.execution.Environment; import org.apache.flink.runtime.io.network.api.StopMode; import org.apache.flink.runtime.io.network.api.writer.RecordWriter; import org.apache.flink.runtime.io.network.api.writer.RecordWriterDelegate; import org.apache.flink.runtime.io.network.partition.consumer.IndexedInputGate; import 
org.apache.flink.runtime.jobgraph.IntermediateDataSetID; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.metrics.MetricNames; import org.apache.flink.runtime.metrics.groups.InternalOperatorMetricGroup; import org.apache.flink.runtime.metrics.groups.TaskIOMetricGroup; import org.apache.flink.runtime.operators.coordination.AcknowledgeCheckpointEvent; import org.apache.flink.runtime.operators.coordination.OperatorEvent; import org.apache.flink.runtime.operators.coordination.OperatorEventDispatcher; import org.apache.flink.runtime.plugable.SerializationDelegate; import org.apache.flink.runtime.state.CheckpointStreamFactory; import org.apache.flink.runtime.state.SnapshotResult; import org.apache.flink.streaming.api.graph.NonChainedOutput; import org.apache.flink.streaming.api.graph.StreamConfig; import org.apache.flink.streaming.api.graph.StreamEdge; import org.apache.flink.streaming.api.operators.BoundedMultiInput; import org.apache.flink.streaming.api.operators.CountingOutput; import org.apache.flink.streaming.api.operators.Input; import org.apache.flink.streaming.api.operators.MultipleInputStreamOperator; import org.apache.flink.streaming.api.operators.OneInputStreamOperator; import org.apache.flink.streaming.api.operators.OperatorSnapshotFutures; import org.apache.flink.streaming.api.operators.Output; import org.apache.flink.streaming.api.operators.SourceOperator; import org.apache.flink.streaming.api.operators.StreamOperator; import org.apache.flink.streaming.api.operators.StreamOperatorFactory; import org.apache.flink.streaming.api.operators.StreamOperatorFactoryUtil; import org.apache.flink.streaming.api.operators.StreamTaskStateInitializer; import org.apache.flink.streaming.runtime.io.RecordWriterOutput; import org.apache.flink.streaming.runtime.io.StreamTaskSourceInput; import org.apache.flink.streaming.runtime.operators.sink.SinkWriterOperatorFactory; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; 
import org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorFactory; import org.apache.flink.util.CollectionUtil; import org.apache.flink.util.FlinkException; import org.apache.flink.util.OutputTag; import org.apache.flink.util.SerializedValue; import org.apache.flink.shaded.guava31.com.google.common.io.Closer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Supplier; import java.util.stream.Collectors; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; import static org.apache.flink.util.Preconditions.checkState; /** * The {@code OperatorChain} contains all operators that are executed as one chain within a single * {@link StreamTask}. * * <p>The main entry point to the chain is it's {@code mainOperator}. {@code mainOperator} is * driving the execution of the {@link StreamTask}, by pulling the records from network inputs * and/or source inputs and pushing produced records to the remaining chained operators. * * @param <OUT> The type of elements accepted by the chain, i.e., the input type of the chain's main * operator. */ public abstract class OperatorChain<OUT, OP extends StreamOperator<OUT>> implements BoundedMultiInput, Closeable { private static final Logger LOG = LoggerFactory.getLogger(OperatorChain.class); protected final RecordWriterOutput<?>[] streamOutputs; protected final WatermarkGaugeExposingOutput<StreamRecord<OUT>> mainOperatorOutput; /** * For iteration, {@link StreamIterationHead} and {@link StreamIterationTail} used for executing * feedback edges do not contain any operators, in which case, {@code mainOperatorWrapper} and * {@code tailOperatorWrapper} are null. 
* * <p>Usually first operator in the chain is the same as {@link #mainOperatorWrapper}, but that's * not the case if there are chained source inputs. In this case, one of the source inputs will be * the first operator. For example the following operator chain is possible: * * <pre> * first * \ * main (multi-input) -> ... -> tail * / * second * </pre> * * <p>Where "first" and "second" (there can be more) are chained source operators. When it comes * to things like closing, stat initialisation or state snapshotting, the operator chain is * traversed: first, second, main, ..., tail or in reversed order: tail, ..., main, second, first */ @Nullable protected final StreamOperatorWrapper<OUT, OP> mainOperatorWrapper; @Nullable protected final StreamOperatorWrapper<?, ?> firstOperatorWrapper; @Nullable protected final StreamOperatorWrapper<?, ?> tailOperatorWrapper; protected final Map<StreamConfig.SourceInputConfig, ChainedSource> chainedSources; protected final int numOperators; protected final OperatorEventDispatcherImpl operatorEventDispatcher; protected final Closer closer = Closer.create(); protected final @Nullable FinishedOnRestoreInput finishedOnRestoreInput; protected boolean isClosed; public OperatorChain( StreamTask<OUT, OP> containingTask, RecordWriterDelegate<SerializationDelegate<StreamRecord<OUT>>> recordWriterDelegate) { this.operatorEventDispatcher = new OperatorEventDispatcherImpl( containingTask.getEnvironment().getUserCodeClassLoader().asClassLoader(), containingTask.getEnvironment().getOperatorCoordinatorEventGateway()); final ClassLoader userCodeClassloader = containingTask.getUserCodeClassLoader(); final StreamConfig configuration = containingTask.getConfiguration(); StreamOperatorFactory<OUT> operatorFactory = configuration.getStreamOperatorFactory(userCodeClassloader); // we read the chained configs, and the order of record writer registrations by output name Map<Integer, StreamConfig> chainedConfigs = 
configuration.getTransitiveChainedTaskConfigsWithSelf(userCodeClassloader); // create the final output stream writers // we iterate through all the out edges from this job vertex and create a stream output List<NonChainedOutput> outputsInOrder = configuration.getVertexNonChainedOutputs(userCodeClassloader); Map<IntermediateDataSetID, RecordWriterOutput<?>> recordWriterOutputs = CollectionUtil.newHashMapWithExpectedSize(outputsInOrder.size()); this.streamOutputs = new RecordWriterOutput<?>[outputsInOrder.size()]; this.finishedOnRestoreInput = this.isTaskDeployedAsFinished() ? new FinishedOnRestoreInput( streamOutputs, configuration.getInputs(userCodeClassloader).length) : null; // from here on, we need to make sure that the output writers are shut down again on failure boolean success = false; try { createChainOutputs( outputsInOrder, recordWriterDelegate, chainedConfigs, containingTask, recordWriterOutputs); // we create the chain of operators and grab the collector that leads into the chain List<StreamOperatorWrapper<?, ?>> allOpWrappers = new ArrayList<>(chainedConfigs.size()); this.mainOperatorOutput = createOutputCollector( containingTask, configuration, chainedConfigs, userCodeClassloader, recordWriterOutputs, allOpWrappers, containingTask.getMailboxExecutorFactory(), operatorFactory != null); if (operatorFactory != null) { Tuple2<OP, Optional<ProcessingTimeService>> mainOperatorAndTimeService = StreamOperatorFactoryUtil.createOperator( operatorFactory, containingTask, configuration, mainOperatorOutput, operatorEventDispatcher); OP mainOperator = mainOperatorAndTimeService.f0; mainOperator .getMetricGroup() .gauge(MetricNames.IO_CURRENT_OUTPUT_WATERMARK, mainOperatorOutput.getWatermarkGauge()); this.mainOperatorWrapper = createOperatorWrapper( mainOperator, containingTask, configuration, mainOperatorAndTimeService.f1, true); // add main operator to end of chain allOpWrappers.add(mainOperatorWrapper); this.tailOperatorWrapper = allOpWrappers.get(0); } else { 
checkState(allOpWrappers.size() == 0); this.mainOperatorWrapper = null; this.tailOperatorWrapper = null; } this.chainedSources = createChainedSources( containingTask, configuration.getInputs(userCodeClassloader), chainedConfigs, userCodeClassloader, allOpWrappers); this.numOperators = allOpWrappers.size(); firstOperatorWrapper = linkOperatorWrappers(allOpWrappers); success = true; } finally { // make sure we clean up after ourselves in case of a failure after acquiring // the first resources if (!success) { for (int i = 0; i < streamOutputs.length; i++) { if (streamOutputs[i] != null) { streamOutputs[i].close(); } streamOutputs[i] = null; } } } } @VisibleForTesting OperatorChain( List<StreamOperatorWrapper<?, ?>> allOperatorWrappers, RecordWriterOutput<?>[] streamOutputs, WatermarkGaugeExposingOutput<StreamRecord<OUT>> mainOperatorOutput, StreamOperatorWrapper<OUT, OP> mainOperatorWrapper) { this.streamOutputs = streamOutputs; this.finishedOnRestoreInput = null; this.mainOperatorOutput = checkNotNull(mainOperatorOutput); this.operatorEventDispatcher = null; checkState(allOperatorWrappers != null && allOperatorWrappers.size() > 0); this.mainOperatorWrapper = checkNotNull(mainOperatorWrapper); this.tailOperatorWrapper = allOperatorWrappers.get(0); this.numOperators = allOperatorWrappers.size(); this.chainedSources = Collections.emptyMap(); firstOperatorWrapper = linkOperatorWrappers(allOperatorWrappers); } public abstract boolean isTaskDeployedAsFinished(); public abstract void dispatchOperatorEvent( OperatorID operator, SerializedValue<OperatorEvent> event) throws FlinkException; public abstract void prepareSnapshotPreBarrier(long checkpointId) throws Exception; /** * Ends the main operator input specified by {@code inputId}). * * @param inputId the input ID starts from 1 which indicates the first input. 
*/ public abstract void endInput(int inputId) throws Exception; /** * Initialize state and open all operators in the chain from <b>tail to heads</b>, contrary to * {@link StreamOperator#close()} which happens <b>heads to tail</b> (see {@link * #finishOperators(StreamTaskActionExecutor, StopMode)}). */ public abstract void initializeStateAndOpenOperators( StreamTaskStateInitializer streamTaskStateInitializer) throws Exception; /** * Closes all operators in a chain effect way. Closing happens from <b>heads to tail</b> operator * in the chain, contrary to {@link StreamOperator#open()} which happens <b>tail to heads</b> (see * {@link #initializeStateAndOpenOperators(StreamTaskStateInitializer)}). */ public abstract void finishOperators(StreamTaskActionExecutor actionExecutor, StopMode stopMode) throws Exception; public abstract void notifyCheckpointComplete(long checkpointId) throws Exception; public abstract void notifyCheckpointAborted(long checkpointId) throws Exception; public abstract void notifyCheckpointSubsumed(long checkpointId) throws Exception; public abstract void snapshotState( Map<OperatorID, OperatorSnapshotFutures> operatorSnapshotsInProgress, CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, Supplier<Boolean> isRunning, ChannelStateWriter.ChannelStateWriteResult channelStateWriteResult, CheckpointStreamFactory storage) throws Exception; public OperatorEventDispatcher getOperatorEventDispatcher() { return operatorEventDispatcher; } public void broadcastEvent(AbstractEvent event) throws IOException { broadcastEvent(event, false); } public void broadcastEvent(AbstractEvent event, boolean isPriorityEvent) throws IOException { for (RecordWriterOutput<?> streamOutput : streamOutputs) { streamOutput.broadcastEvent(event, isPriorityEvent); } } public void alignedBarrierTimeout(long checkpointId) throws IOException { for (RecordWriterOutput<?> streamOutput : streamOutputs) { streamOutput.alignedBarrierTimeout(checkpointId); } } public 
void abortCheckpoint(long checkpointId, CheckpointException cause) { for (RecordWriterOutput<?> streamOutput : streamOutputs) { streamOutput.abortCheckpoint(checkpointId, cause); } } /** * Execute {@link StreamOperator#close()} of each operator in the chain of this {@link * StreamTask}. Closing happens from <b>tail to head</b> operator in the chain. */ public void closeAllOperators() throws Exception { isClosed = true; } public RecordWriterOutput<?>[] getStreamOutputs() { return streamOutputs; } /** Returns an {@link Iterable} which traverses all operators in forward topological order. */ @VisibleForTesting public Iterable<StreamOperatorWrapper<?, ?>> getAllOperators() { return getAllOperators(false); } /** * Returns an {@link Iterable} which traverses all operators in forward or reverse topological * order. */ protected Iterable<StreamOperatorWrapper<?, ?>> getAllOperators(boolean reverse) { return reverse ? new StreamOperatorWrapper.ReadIterator(tailOperatorWrapper, true) : new StreamOperatorWrapper.ReadIterator(mainOperatorWrapper, false); } public Input getFinishedOnRestoreInputOrDefault(Input defaultInput) { return finishedOnRestoreInput == null ? 
defaultInput : finishedOnRestoreInput; } public int getNumberOfOperators() { return numOperators; } public WatermarkGaugeExposingOutput<StreamRecord<OUT>> getMainOperatorOutput() { return mainOperatorOutput; } public ChainedSource getChainedSource(StreamConfig.SourceInputConfig sourceInput) { checkArgument( chainedSources.containsKey(sourceInput), "Chained source with sourcedId = [%s] was not found", sourceInput); return chainedSources.get(sourceInput); } public List<Output<StreamRecord<?>>> getChainedSourceOutputs() { return chainedSources.values().stream() .map(ChainedSource::getSourceOutput) .collect(Collectors.toList()); } public StreamTaskSourceInput<?> getSourceTaskInput(StreamConfig.SourceInputConfig sourceInput) { checkArgument( chainedSources.containsKey(sourceInput), "Chained source with sourcedId = [%s] was not found", sourceInput); return chainedSources.get(sourceInput).getSourceTaskInput(); } public List<StreamTaskSourceInput<?>> getSourceTaskInputs() { return chainedSources.values().stream() .map(ChainedSource::getSourceTaskInput) .collect(Collectors.toList()); } /** * This method should be called before finishing the record emission, to make sure any data that * is still buffered will be sent. It also ensures that all data sending related exceptions are * recognized. * * @throws IOException Thrown, if the buffered data cannot be pushed into the output streams. */ public void flushOutputs() throws IOException { for (RecordWriterOutput<?> streamOutput : getStreamOutputs()) { streamOutput.flush(); } } /** * This method releases all resources of the record writer output. It stops the output flushing * thread (if there is one) and releases all buffers currently held by the output serializers. * * <p>This method should never fail. */ public void close() throws IOException { closer.close(); } @Nullable public OP getMainOperator() { return (mainOperatorWrapper == null) ? 
null : mainOperatorWrapper.getStreamOperator(); } @Nullable protected StreamOperator<?> getTailOperator() { return (tailOperatorWrapper == null) ? null : tailOperatorWrapper.getStreamOperator(); } protected void snapshotChannelStates( StreamOperator<?> op, ChannelStateWriter.ChannelStateWriteResult channelStateWriteResult, OperatorSnapshotFutures snapshotInProgress) { if (op == getMainOperator()) { snapshotInProgress.setInputChannelStateFuture( channelStateWriteResult .getInputChannelStateHandles() .thenApply(StateObjectCollection::new) .thenApply(SnapshotResult::of)); } if (op == getTailOperator()) { snapshotInProgress.setResultSubpartitionStateFuture( channelStateWriteResult .getResultSubpartitionStateHandles() .thenApply(StateObjectCollection::new) .thenApply(SnapshotResult::of)); } } public boolean isClosed() { return isClosed; } /** Wrapper class to access the chained sources and their's outputs. */ public static class ChainedSource { private final WatermarkGaugeExposingOutput<StreamRecord<?>> chainedSourceOutput; private final StreamTaskSourceInput<?> sourceTaskInput; public ChainedSource( WatermarkGaugeExposingOutput<StreamRecord<?>> chainedSourceOutput, StreamTaskSourceInput<?> sourceTaskInput) { this.chainedSourceOutput = chainedSourceOutput; this.sourceTaskInput = sourceTaskInput; } public WatermarkGaugeExposingOutput<StreamRecord<?>> getSourceOutput() { return chainedSourceOutput; } public StreamTaskSourceInput<?> getSourceTaskInput() { return sourceTaskInput; } } // ------------------------------------------------------------------------ // initialization utilities // ------------------------------------------------------------------------ private void createChainOutputs( List<NonChainedOutput> outputsInOrder, RecordWriterDelegate<SerializationDelegate<StreamRecord<OUT>>> recordWriterDelegate, Map<Integer, StreamConfig> chainedConfigs, StreamTask<OUT, OP> containingTask, Map<IntermediateDataSetID, RecordWriterOutput<?>> recordWriterOutputs) { for (int i 
= 0; i < outputsInOrder.size(); ++i) { NonChainedOutput output = outputsInOrder.get(i); RecordWriterOutput<?> recordWriterOutput = createStreamOutput( recordWriterDelegate.getRecordWriter(i), output, chainedConfigs.get(output.getSourceNodeId()), containingTask.getEnvironment()); this.streamOutputs[i] = recordWriterOutput; recordWriterOutputs.put(output.getDataSetId(), recordWriterOutput); } } private RecordWriterOutput<OUT> createStreamOutput( RecordWriter<SerializationDelegate<StreamRecord<OUT>>> recordWriter, NonChainedOutput streamOutput, StreamConfig upStreamConfig, Environment taskEnvironment) { OutputTag sideOutputTag = streamOutput.getOutputTag(); // OutputTag, return null if not sideOutput TypeSerializer outSerializer; if (streamOutput.getOutputTag() != null) { // side output outSerializer = upStreamConfig.getTypeSerializerSideOut( streamOutput.getOutputTag(), taskEnvironment.getUserCodeClassLoader().asClassLoader()); } else { // main output outSerializer = upStreamConfig.getTypeSerializerOut( taskEnvironment.getUserCodeClassLoader().asClassLoader()); } return closer.register( new RecordWriterOutput<OUT>( recordWriter, outSerializer, sideOutputTag, streamOutput.supportsUnalignedCheckpoints())); } @SuppressWarnings("rawtypes") private Map<StreamConfig.SourceInputConfig, ChainedSource> createChainedSources( StreamTask<OUT, OP> containingTask, StreamConfig.InputConfig[] configuredInputs, Map<Integer, StreamConfig> chainedConfigs, ClassLoader userCodeClassloader, List<StreamOperatorWrapper<?, ?>> allOpWrappers) { if (Arrays.stream(configuredInputs) .noneMatch(input -> input instanceof StreamConfig.SourceInputConfig)) { return Collections.emptyMap(); } checkState( mainOperatorWrapper.getStreamOperator() instanceof MultipleInputStreamOperator, "Creating chained input is only supported with MultipleInputStreamOperator and MultipleInputStreamTask"); Map<StreamConfig.SourceInputConfig, ChainedSource> chainedSourceInputs = new HashMap<>(); 
MultipleInputStreamOperator<?> multipleInputOperator = (MultipleInputStreamOperator<?>) mainOperatorWrapper.getStreamOperator(); List<Input> operatorInputs = multipleInputOperator.getInputs(); int sourceInputGateIndex = Arrays.stream(containingTask.getEnvironment().getAllInputGates()) .mapToInt(IndexedInputGate::getInputGateIndex) .max() .orElse(-1) + 1; for (int inputId = 0; inputId < configuredInputs.length; inputId++) { if (!(configuredInputs[inputId] instanceof StreamConfig.SourceInputConfig)) { continue; } StreamConfig.SourceInputConfig sourceInput = (StreamConfig.SourceInputConfig) configuredInputs[inputId]; int sourceEdgeId = sourceInput.getInputEdge().getSourceId(); StreamConfig sourceInputConfig = chainedConfigs.get(sourceEdgeId); OutputTag outputTag = sourceInput.getInputEdge().getOutputTag(); WatermarkGaugeExposingOutput chainedSourceOutput = createChainedSourceOutput( containingTask, sourceInputConfig, userCodeClassloader, getFinishedOnRestoreInputOrDefault(operatorInputs.get(inputId)), multipleInputOperator.getMetricGroup(), outputTag); SourceOperator<?, ?> sourceOperator = (SourceOperator<?, ?>) createOperator( containingTask, sourceInputConfig, userCodeClassloader, (WatermarkGaugeExposingOutput<StreamRecord<OUT>>) chainedSourceOutput, allOpWrappers, true); chainedSourceInputs.put( sourceInput, new ChainedSource( chainedSourceOutput, this.isTaskDeployedAsFinished() ? new StreamTaskFinishedOnRestoreSourceInput<>( sourceOperator, sourceInputGateIndex++, inputId) : new StreamTaskSourceInput<>(sourceOperator, sourceInputGateIndex++, inputId))); } return chainedSourceInputs; } /** * Get the numRecordsOut counter for the operator represented by the given config. And re-use the * operator-level counter for the task-level numRecordsOut counter if this operator is at the end * of the operator chain. * * <p>Return null if we should not use the numRecordsOut counter to track the records emitted by * this operator. 
*/ @Nullable private Counter getOperatorRecordsOutCounter( StreamTask<?, ?> containingTask, StreamConfig operatorConfig) { ClassLoader userCodeClassloader = containingTask.getUserCodeClassLoader(); Class<StreamOperatorFactory<?>> streamOperatorFactoryClass = operatorConfig.getStreamOperatorFactoryClass(userCodeClassloader); // Do not use the numRecordsOut counter on output if this operator is SinkWriterOperator. // // Metric "numRecordsOut" is defined as the total number of records written to the // external system in FLIP-33, but this metric is occupied in AbstractStreamOperator as the // number of records sent to downstream operators, which is number of Committable batches // sent to SinkCommitter. So we skip registering this metric on output and leave this metric // to sink writer implementations to report. try { Class<?> sinkWriterFactoryClass = userCodeClassloader.loadClass(SinkWriterOperatorFactory.class.getName()); if (sinkWriterFactoryClass.isAssignableFrom(streamOperatorFactoryClass)) { return null; } } catch (ClassNotFoundException e) { throw new StreamTaskException( "Could not load SinkWriterOperatorFactory class from userCodeClassloader.", e); } InternalOperatorMetricGroup operatorMetricGroup = containingTask .getEnvironment() .getMetricGroup() .getOrAddOperator(operatorConfig.getOperatorID(), operatorConfig.getOperatorName()); return operatorMetricGroup.getIOMetricGroup().getNumRecordsOutCounter(); } @SuppressWarnings({"rawtypes", "unchecked"}) private WatermarkGaugeExposingOutput<StreamRecord> createChainedSourceOutput( StreamTask<?, OP> containingTask, StreamConfig sourceInputConfig, ClassLoader userCodeClassloader, Input input, OperatorMetricGroup metricGroup, OutputTag outputTag) { Counter recordsOutCounter = getOperatorRecordsOutCounter(containingTask, sourceInputConfig); WatermarkGaugeExposingOutput<StreamRecord> chainedSourceOutput; if (containingTask.getExecutionConfig().isObjectReuseEnabled()) { chainedSourceOutput = new ChainingOutput(input, 
recordsOutCounter, metricGroup, outputTag); } else { TypeSerializer<?> inSerializer = sourceInputConfig.getTypeSerializerOut(userCodeClassloader); chainedSourceOutput = new CopyingChainingOutput(input, inSerializer, recordsOutCounter, metricGroup, outputTag); } /** * Chained sources are closed when {@link * org.apache.flink.streaming.runtime.io.StreamTaskSourceInput} are being closed. */ return closer.register(chainedSourceOutput); } private <T> WatermarkGaugeExposingOutput<StreamRecord<T>> createOutputCollector( StreamTask<?, ?> containingTask, StreamConfig operatorConfig, Map<Integer, StreamConfig> chainedConfigs, ClassLoader userCodeClassloader, Map<IntermediateDataSetID, RecordWriterOutput<?>> recordWriterOutputs, List<StreamOperatorWrapper<?, ?>> allOperatorWrappers, MailboxExecutorFactory mailboxExecutorFactory, boolean shouldAddMetric) { // --- Begin Gluten-specific code changes --- List<OutputWithChainingCheck<StreamRecord<T>>> allOutputs = new ArrayList<>(4); Map<String, OutputWithChainingCheck<StreamRecord<T>>> glutenOutputs = new HashMap<>(); Map<IntermediateDataSetID, String> node2outputs = Utils.getNodeToNonChainedOutputs(operatorConfig, userCodeClassloader); // create collectors for the network outputs for (NonChainedOutput streamOutput : operatorConfig.getOperatorNonChainedOutputs(userCodeClassloader)) { @SuppressWarnings("unchecked") RecordWriterOutput<T> recordWriterOutput = (RecordWriterOutput<T>) recordWriterOutputs.get(streamOutput.getDataSetId()); allOutputs.add(recordWriterOutput); glutenOutputs.put(node2outputs.get(streamOutput.getDataSetId()), recordWriterOutput); } // Create collectors for the chained outputs for (StreamEdge outputEdge : operatorConfig.getChainedOutputs(userCodeClassloader)) { int outputId = outputEdge.getTargetId(); StreamConfig chainedOpConfig = chainedConfigs.get(outputId); WatermarkGaugeExposingOutput<StreamRecord<T>> output = createOperatorChain( containingTask, operatorConfig, chainedOpConfig, chainedConfigs, 
userCodeClassloader, recordWriterOutputs, allOperatorWrappers, outputEdge.getOutputTag(), mailboxExecutorFactory, shouldAddMetric); checkState(output instanceof OutputWithChainingCheck); allOutputs.add((OutputWithChainingCheck) output); // If the operator has multiple downstream chained operators, only one of them should // increment the recordsOutCounter for this operator. Set shouldAddMetric to false // so that we would skip adding the counter to other downstream operators. shouldAddMetric = false; } WatermarkGaugeExposingOutput<StreamRecord<T>> result; if (allOutputs.size() == 1) { result = allOutputs.get(0); // only if this is a single RecordWriterOutput, reuse its numRecordOut for task. if (result instanceof RecordWriterOutput) { Counter numRecordsOutCounter = createNumRecordsOutCounter(containingTask); ((RecordWriterOutput<T>) result).setNumRecordsOut(numRecordsOutCounter); } } else { if (glutenOutputs.size() > 0 && allOutputs.size() != glutenOutputs.size()) { throw new RuntimeException("Number of outputs and gluten outputs do not match."); } // TODO: add counter result = closer.register(new GlutenOutputCollector<>(glutenOutputs, null)); } // --- End Gluten-specific code changes --- if (shouldAddMetric) { // Create a CountingOutput to increment the recordsOutCounter for this operator // if we have not added the counter to any downstream chained operator. Counter recordsOutCounter = getOperatorRecordsOutCounter(containingTask, operatorConfig); if (recordsOutCounter != null) { result = new CountingOutput<>(result, recordsOutCounter); } } return result; } private static Counter createNumRecordsOutCounter(StreamTask<?, ?> containingTask) { TaskIOMetricGroup taskIOMetricGroup = containingTask.getEnvironment().getMetricGroup().getIOMetricGroup(); Counter counter = new SimpleCounter(); taskIOMetricGroup.reuseRecordsOutputCounter(counter); return counter; } /** * Recursively create chain of operators that starts from the given {@param operatorConfig}. 
* Operators are created tail to head and wrapped into an {@link WatermarkGaugeExposingOutput}. */ private <IN, OUT> WatermarkGaugeExposingOutput<StreamRecord<IN>> createOperatorChain( StreamTask<OUT, ?> containingTask, StreamConfig prevOperatorConfig, StreamConfig operatorConfig, Map<Integer, StreamConfig> chainedConfigs, ClassLoader userCodeClassloader, Map<IntermediateDataSetID, RecordWriterOutput<?>> recordWriterOutputs, List<StreamOperatorWrapper<?, ?>> allOperatorWrappers, OutputTag<IN> outputTag, MailboxExecutorFactory mailboxExecutorFactory, boolean shouldAddMetricForPrevOperator) { // create the output that the operator writes to first. this may recursively create more // operators WatermarkGaugeExposingOutput<StreamRecord<OUT>> chainedOperatorOutput = createOutputCollector( containingTask, operatorConfig, chainedConfigs, userCodeClassloader, recordWriterOutputs, allOperatorWrappers, mailboxExecutorFactory, true); OneInputStreamOperator<IN, OUT> chainedOperator = createOperator( containingTask, operatorConfig, userCodeClassloader, chainedOperatorOutput, allOperatorWrappers, false); return wrapOperatorIntoOutput( chainedOperator, containingTask, prevOperatorConfig, operatorConfig, userCodeClassloader, outputTag, shouldAddMetricForPrevOperator); } /** * Create and return a single operator from the given {@param operatorConfig} that will be * producing records to the {@param output}. 
*/
    // Instantiates one chained operator via its factory, registers its wrapper, and exposes
    // its current-output-watermark gauge. Returns the operator so callers can chain further.
    private <OUT, OP extends StreamOperator<OUT>> OP createOperator(
            StreamTask<OUT, ?> containingTask,
            StreamConfig operatorConfig,
            ClassLoader userCodeClassloader,
            WatermarkGaugeExposingOutput<StreamRecord<OUT>> output,
            List<StreamOperatorWrapper<?, ?>> allOperatorWrappers,
            boolean isHead) {

        // now create the operator and give it the output collector to write its output to
        Tuple2<OP, Optional<ProcessingTimeService>> chainedOperatorAndTimeService =
                StreamOperatorFactoryUtil.createOperator(
                        operatorConfig.getStreamOperatorFactory(userCodeClassloader),
                        containingTask,
                        operatorConfig,
                        output,
                        operatorEventDispatcher);

        OP chainedOperator = chainedOperatorAndTimeService.f0;
        // Wrap the operator (together with its optional per-operator time service) so it can be
        // linked into the chain's open/close lifecycle later.
        allOperatorWrappers.add(
                createOperatorWrapper(
                        chainedOperator,
                        containingTask,
                        operatorConfig,
                        chainedOperatorAndTimeService.f1,
                        isHead));

        chainedOperator
                .getMetricGroup()
                .gauge(MetricNames.IO_CURRENT_OUTPUT_WATERMARK, output.getWatermarkGauge()::getValue);
        return chainedOperator;
    }

    // Builds the Output that feeds records into {@code operator}, choosing the copying variant
    // when object reuse is disabled (records must then be deep-copied before handover).
    // The returned output is registered with {@code closer} for cleanup.
    private <IN, OUT> WatermarkGaugeExposingOutput<StreamRecord<IN>> wrapOperatorIntoOutput(
            OneInputStreamOperator<IN, OUT> operator,
            StreamTask<OUT, ?> containingTask,
            StreamConfig prevOperatorConfig,
            StreamConfig operatorConfig,
            ClassLoader userCodeClassloader,
            OutputTag<IN> outputTag,
            boolean shouldAddMetricForPrevOperator) {

        // Only the upstream (previous) operator's records-out counter is attached here;
        // callers decide via the flag to avoid double-counting.
        Counter recordsOutCounter = null;
        if (shouldAddMetricForPrevOperator) {
            recordsOutCounter = getOperatorRecordsOutCounter(containingTask, prevOperatorConfig);
        }

        WatermarkGaugeExposingOutput<StreamRecord<IN>> currentOperatorOutput;
        if (containingTask.getExecutionConfig().isObjectReuseEnabled()) {
            // Object reuse enabled: records may be handed over without copying.
            currentOperatorOutput =
                    new ChainingOutput<>(operator, recordsOutCounter, operator.getMetricGroup(), outputTag);
        } else {
            // No object reuse: serializer-based deep copy protects downstream from mutation.
            TypeSerializer<IN> inSerializer = operatorConfig.getTypeSerializerIn1(userCodeClassloader);
            currentOperatorOutput =
                    new CopyingChainingOutput<>(
                            operator, inSerializer, recordsOutCounter, operator.getMetricGroup(), outputTag);
        }

        // wrap watermark gauges since registered metrics must be unique
        operator
                .getMetricGroup()
                .gauge(
                        MetricNames.IO_CURRENT_INPUT_WATERMARK,
                        currentOperatorOutput.getWatermarkGauge()::getValue);

        return closer.register(currentOperatorOutput);
    }

    /**
     * Links operator wrappers in forward topological order.
     *
     * @param allOperatorWrappers is an operator wrapper list of reverse topological order
     * @return the head wrapper of the forward-linked chain (last element of the input list),
     *     or {@code null} if the list is empty
     */
    private StreamOperatorWrapper<?, ?> linkOperatorWrappers(
            List<StreamOperatorWrapper<?, ?>> allOperatorWrappers) {
        StreamOperatorWrapper<?, ?> previous = null;
        for (StreamOperatorWrapper<?, ?> current : allOperatorWrappers) {
            if (previous != null) {
                // Input order is reverse-topological, so the element already seen ("previous")
                // is logically downstream of "current".
                previous.setPrevious(current);
            }
            current.setNext(previous);
            previous = current;
        }
        return previous;
    }

    // Packages an operator with its optional ProcessingTimeService and a mailbox executor
    // keyed by the operator's chain index.
    private <T, P extends StreamOperator<T>> StreamOperatorWrapper<T, P> createOperatorWrapper(
            P operator,
            StreamTask<?, ?> containingTask,
            StreamConfig operatorConfig,
            Optional<ProcessingTimeService> processingTimeService,
            boolean isHead) {
        return new StreamOperatorWrapper<>(
                operator,
                processingTimeService,
                containingTask.getMailboxExecutorFactory().createExecutor(operatorConfig.getChainIndex()),
                isHead);
    }

    // Notifies every registered operator coordinator that the given checkpoint completed.
    // Safe no-op when no event dispatcher is configured.
    protected void sendAcknowledgeCheckpointEvent(long checkpointId) {
        if (operatorEventDispatcher == null) {
            return;
        }

        operatorEventDispatcher
                .getRegisteredOperators()
                .forEach(
                        x ->
                                operatorEventDispatcher
                                        .getOperatorEventGateway(x)
                                        .sendEventToCoordinator(new AcknowledgeCheckpointEvent(checkpointId)));
    }
}
google/android-key-attestation
38,134
src/main/java/com/google/android/attestation/AuthorizationList.java
/* Copyright 2019, The Android Open Source Project, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.attestation; import static com.google.android.attestation.AuthorizationList.UserAuthType.FINGERPRINT; import static com.google.android.attestation.AuthorizationList.UserAuthType.PASSWORD; import static com.google.android.attestation.AuthorizationList.UserAuthType.USER_AUTH_TYPE_ANY; import static com.google.android.attestation.AuthorizationList.UserAuthType.USER_AUTH_TYPE_NONE; import static com.google.android.attestation.Constants.KM_TAG_ACTIVE_DATE_TIME; import static com.google.android.attestation.Constants.KM_TAG_ALGORITHM; import static com.google.android.attestation.Constants.KM_TAG_ALLOW_WHILE_ON_BODY; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_APPLICATION_ID; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_BRAND; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_DEVICE; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_IMEI; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_MANUFACTURER; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_MEID; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_MODEL; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_PRODUCT; import static 
com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_SECOND_IMEI; import static com.google.android.attestation.Constants.KM_TAG_ATTESTATION_ID_SERIAL; import static com.google.android.attestation.Constants.KM_TAG_AUTH_TIMEOUT; import static com.google.android.attestation.Constants.KM_TAG_BOOT_PATCH_LEVEL; import static com.google.android.attestation.Constants.KM_TAG_CREATION_DATE_TIME; import static com.google.android.attestation.Constants.KM_TAG_DEVICE_UNIQUE_ATTESTATION; import static com.google.android.attestation.Constants.KM_TAG_DIGEST; import static com.google.android.attestation.Constants.KM_TAG_EC_CURVE; import static com.google.android.attestation.Constants.KM_TAG_KEY_SIZE; import static com.google.android.attestation.Constants.KM_TAG_NO_AUTH_REQUIRED; import static com.google.android.attestation.Constants.KM_TAG_ORIGIN; import static com.google.android.attestation.Constants.KM_TAG_ORIGINATION_EXPIRE_DATE_TIME; import static com.google.android.attestation.Constants.KM_TAG_OS_PATCH_LEVEL; import static com.google.android.attestation.Constants.KM_TAG_OS_VERSION; import static com.google.android.attestation.Constants.KM_TAG_PADDING; import static com.google.android.attestation.Constants.KM_TAG_PURPOSE; import static com.google.android.attestation.Constants.KM_TAG_ROLLBACK_RESISTANCE; import static com.google.android.attestation.Constants.KM_TAG_ROOT_OF_TRUST; import static com.google.android.attestation.Constants.KM_TAG_RSA_PUBLIC_EXPONENT; import static com.google.android.attestation.Constants.KM_TAG_TRUSTED_CONFIRMATION_REQUIRED; import static com.google.android.attestation.Constants.KM_TAG_TRUSTED_USER_PRESENCE_REQUIRED; import static com.google.android.attestation.Constants.KM_TAG_UNLOCKED_DEVICE_REQUIRED; import static com.google.android.attestation.Constants.KM_TAG_USAGE_EXPIRE_DATE_TIME; import static com.google.android.attestation.Constants.KM_TAG_USER_AUTH_TYPE; import static 
com.google.android.attestation.Constants.KM_TAG_VENDOR_PATCH_LEVEL; import static com.google.android.attestation.Constants.UINT32_MAX; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static java.util.Arrays.stream; import com.google.auto.value.AutoValue; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Streams; import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.errorprone.annotations.Immutable; import com.google.protobuf.ByteString; import java.time.Duration; import java.time.Instant; import java.time.LocalDate; import java.time.YearMonth; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Stream; import org.bouncycastle.asn1.ASN1Encodable; import org.bouncycastle.asn1.ASN1EncodableVector; import org.bouncycastle.asn1.ASN1Integer; import org.bouncycastle.asn1.ASN1Object; import org.bouncycastle.asn1.ASN1OctetString; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.ASN1Set; import org.bouncycastle.asn1.ASN1TaggedObject; import org.bouncycastle.asn1.ASN1Util; import org.bouncycastle.asn1.DERNull; import org.bouncycastle.asn1.DEROctetString; import org.bouncycastle.asn1.DERSequence; import org.bouncycastle.asn1.DERSet; import org.bouncycastle.asn1.DERTaggedObject; /** * This data structure contains the key pair's properties themselves, as defined in the Keymaster * hardware abstraction layer (HAL). 
You compare these values to the device's current state or to a
set of expected values to verify that a key pair is still valid for use in your app.
*/
@AutoValue
@Immutable
public abstract class AuthorizationList {

  /** Specifies the types of user authenticators that may be used to authorize this key. */
  public enum UserAuthType {
    USER_AUTH_TYPE_NONE,
    PASSWORD,
    FINGERPRINT,
    USER_AUTH_TYPE_ANY
  }

  /**
   * Asymmetric algorithms from
   * https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/security/keymint/aidl/android/hardware/security/keymint/Algorithm.aidl
   */
  public enum Algorithm {
    RSA,
    EC,
  }

  // Keymaster/KeyMint wire codes for Algorithm (RSA = 1, EC = 3) and the reverse mapping
  // used when parsing. The gap (2) is intentional per the AIDL definition linked above.
  private static final ImmutableMap<Algorithm, Integer> ALGORITHM_TO_ASN1 =
      ImmutableMap.of(Algorithm.RSA, 1, Algorithm.EC, 3);
  private static final ImmutableMap<Integer, Algorithm> ASN1_TO_ALGORITHM =
      ImmutableMap.of(1, Algorithm.RSA, 3, Algorithm.EC);

  /**
   * From
   * https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/security/keymint/aidl/android/hardware/security/keymint/EcCurve.aidl
   */
  public enum EcCurve {
    P_224,
    P_256,
    P_384,
    P_521,
    CURVE_25519
  }

  // EcCurve <-> wire-code mappings (0..4 in declaration order).
  private static final ImmutableMap<EcCurve, Integer> EC_CURVE_TO_ASN1 =
      ImmutableMap.of(
          EcCurve.P_224, 0, EcCurve.P_256, 1, EcCurve.P_384, 2, EcCurve.P_521, 3,
          EcCurve.CURVE_25519, 4);
  private static final ImmutableMap<Integer, EcCurve> ASN1_TO_EC_CURVE =
      ImmutableMap.of(
          0, EcCurve.P_224, 1, EcCurve.P_256, 2, EcCurve.P_384, 3, EcCurve.P_521, 4,
          EcCurve.CURVE_25519);

  /**
   * From
   * https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/security/keymint/aidl/android/hardware/security/keymint/PaddingMode.aidl
   */
  public enum PaddingMode {
    NONE,
    RSA_OAEP,
    RSA_PSS,
    RSA_PKCS1_1_5_ENCRYPT,
    RSA_PKCS1_1_5_SIGN,
    PKCS7
  }

  // PaddingMode <-> wire-code mappings; note PKCS7 is 64, not contiguous with the others.
  static final ImmutableMap<PaddingMode, Integer> PADDING_MODE_TO_ASN1 =
      ImmutableMap.of(
          PaddingMode.NONE, 1,
          PaddingMode.RSA_OAEP, 2,
          PaddingMode.RSA_PSS, 3,
          PaddingMode.RSA_PKCS1_1_5_ENCRYPT, 4,
          PaddingMode.RSA_PKCS1_1_5_SIGN, 5,
          PaddingMode.PKCS7, 64);
  static final ImmutableMap<Integer, PaddingMode> ASN1_TO_PADDING_MODE =
      ImmutableMap.of(
          1, PaddingMode.NONE,
          2, PaddingMode.RSA_OAEP,
          3, PaddingMode.RSA_PSS,
          4, PaddingMode.RSA_PKCS1_1_5_ENCRYPT,
          5, PaddingMode.RSA_PKCS1_1_5_SIGN,
          64, PaddingMode.PKCS7);

  /**
   * From
   * https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/security/keymint/aidl/android/hardware/security/keymint/Digest.aidl
   */
  public enum DigestMode {
    NONE,
    MD5,
    SHA1,
    SHA_2_224,
    SHA_2_256,
    SHA_2_384,
    SHA_2_512
  }

  // DigestMode <-> wire-code mappings (0..6 in declaration order).
  static final ImmutableMap<DigestMode, Integer> DIGEST_MODE_TO_ASN1 =
      ImmutableMap.of(
          DigestMode.NONE, 0,
          DigestMode.MD5, 1,
          DigestMode.SHA1, 2,
          DigestMode.SHA_2_224, 3,
          DigestMode.SHA_2_256, 4,
          DigestMode.SHA_2_384, 5,
          DigestMode.SHA_2_512, 6);
  static final ImmutableMap<Integer, DigestMode> ASN1_TO_DIGEST_MODE =
      ImmutableMap.of(
          0, DigestMode.NONE,
          1, DigestMode.MD5,
          2, DigestMode.SHA1,
          3, DigestMode.SHA_2_224,
          4, DigestMode.SHA_2_256,
          5, DigestMode.SHA_2_384,
          6, DigestMode.SHA_2_512);

  /**
   * From
   * https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/security/keymint/aidl/android/hardware/security/keymint/KeyOrigin.aidl
   */
  public enum KeyOrigin {
    GENERATED,
    DERIVED,
    IMPORTED,
    RESERVED,
    SECURELY_IMPORTED
  }

  // KeyOrigin <-> wire-code mappings. Note the wire order (IMPORTED = 1, DERIVED = 2)
  // differs from the enum's declaration order above.
  static final ImmutableMap<KeyOrigin, Integer> KEY_ORIGIN_TO_ASN1 =
      ImmutableMap.of(
          KeyOrigin.GENERATED, 0,
          KeyOrigin.IMPORTED, 1,
          KeyOrigin.DERIVED, 2,
          KeyOrigin.RESERVED, 3,
          KeyOrigin.SECURELY_IMPORTED, 4);
  static final ImmutableMap<Integer, KeyOrigin> ASN1_TO_KEY_ORIGIN =
      ImmutableMap.of(
          0, KeyOrigin.GENERATED,
          1, KeyOrigin.IMPORTED,
          2, KeyOrigin.DERIVED,
          3, KeyOrigin.RESERVED,
          4, KeyOrigin.SECURELY_IMPORTED);

  /**
   * From
   * https://cs.android.com/android/platform/superproject/+/master:hardware/interfaces/security/keymint/aidl/android/hardware/security/keymint/KeyPurpose.aidl
   */
  public enum OperationPurpose {
    ENCRYPT,
    DECRYPT,
    SIGN,
    VERIFY,
    WRAP_KEY,
    AGREE_KEY,
    ATTEST_KEY
  }

  // OperationPurpose <-> wire-code mappings; code 4 is unassigned per the AIDL definition.
  static final ImmutableMap<OperationPurpose, Integer> OPERATION_PURPOSE_TO_ASN1 =
      ImmutableMap.of(
          OperationPurpose.ENCRYPT, 0,
          OperationPurpose.DECRYPT, 1,
          OperationPurpose.SIGN, 2,
          OperationPurpose.VERIFY, 3,
          OperationPurpose.WRAP_KEY, 5,
          OperationPurpose.AGREE_KEY, 6,
          OperationPurpose.ATTEST_KEY, 7);
  static final ImmutableMap<Integer, OperationPurpose> ASN1_TO_OPERATION_PURPOSE =
      ImmutableMap.of(
          0, OperationPurpose.ENCRYPT,
          1, OperationPurpose.DECRYPT,
          2, OperationPurpose.SIGN,
          3, OperationPurpose.VERIFY,
          5, OperationPurpose.WRAP_KEY,
          6, OperationPurpose.AGREE_KEY,
          7, OperationPurpose.ATTEST_KEY);

  // --- AutoValue accessors: one per Keymaster/KeyMint authorization tag. ---
  // Optional<...> accessors are empty when the tag was absent from the certificate;
  // boolean accessors default to false (see builder()).

  public abstract ImmutableSet<OperationPurpose> purpose();

  public abstract Optional<Algorithm> algorithm();

  public abstract Optional<Integer> keySize();

  public abstract ImmutableSet<DigestMode> digest();

  public abstract ImmutableSet<PaddingMode> padding();

  public abstract Optional<EcCurve> ecCurve();

  public abstract Optional<Long> rsaPublicExponent();

  public abstract boolean rollbackResistance();

  public abstract Optional<Instant> activeDateTime();

  public abstract Optional<Instant> originationExpireDateTime();

  public abstract Optional<Instant> usageExpireDateTime();

  public abstract boolean noAuthRequired();

  public abstract ImmutableSet<UserAuthType> userAuthType();

  public abstract Optional<Duration> authTimeout();

  public abstract boolean allowWhileOnBody();

  public abstract boolean trustedUserPresenceRequired();

  public abstract boolean trustedConfirmationRequired();

  public abstract boolean unlockedDeviceRequired();

  public abstract Optional<Instant> creationDateTime();

  public abstract Optional<KeyOrigin> origin();

  public abstract Optional<RootOfTrust> rootOfTrust();

  public abstract Optional<Integer> osVersion();

  public abstract Optional<YearMonth> osPatchLevel();

  public abstract Optional<AttestationApplicationId> attestationApplicationId();

  public abstract Optional<ByteString> attestationIdBrand();

  public abstract Optional<ByteString> attestationIdDevice();

  public abstract Optional<ByteString> attestationIdProduct();

  public abstract Optional<ByteString> attestationIdSerial();

  public abstract Optional<ByteString> attestationIdImei();

  public abstract Optional<ByteString> attestationIdSecondImei();

  public abstract Optional<ByteString> attestationIdMeid();

  public abstract Optional<ByteString> attestationIdManufacturer();

  public abstract Optional<ByteString> attestationIdModel();

  public abstract Optional<LocalDate> vendorPatchLevel();

  public abstract Optional<LocalDate> bootPatchLevel();

  public abstract boolean individualAttestation();

  // Tags whose wire encoding was not in ascending order; collected during parsing so
  // callers can detect non-conformant encoders.
  public abstract ImmutableList<Integer> unorderedTags();

  public abstract Builder toBuilder();

  /** Returns a builder with every boolean tag preset to false (tag absent). */
  public static Builder builder() {
    return new AutoValue_AuthorizationList.Builder()
        .setRollbackResistance(false)
        .setNoAuthRequired(false)
        .setAllowWhileOnBody(false)
        .setTrustedUserPresenceRequired(false)
        .setTrustedConfirmationRequired(false)
        .setUnlockedDeviceRequired(false)
        .setIndividualAttestation(false);
  }

  /**
   * Builder for an AuthorizationList. Any field not set will be made an Optional.empty or set with
   * the default value.
   */
  @AutoValue.Builder
  public abstract static class Builder {
    abstract ImmutableSet.Builder<OperationPurpose> purposeBuilder();

    @CanIgnoreReturnValue
    public final Builder addPurpose(OperationPurpose value) {
      purposeBuilder().add(value);
      return this;
    }

    public abstract Builder setAlgorithm(Algorithm algorithm);

    public abstract Builder setKeySize(Integer keySize);

    abstract ImmutableSet.Builder<DigestMode> digestBuilder();

    @CanIgnoreReturnValue
    public final Builder addDigest(DigestMode value) {
      digestBuilder().add(value);
      return this;
    }

    abstract ImmutableSet.Builder<PaddingMode> paddingBuilder();

    @CanIgnoreReturnValue
    public final Builder addPadding(PaddingMode value) {
      paddingBuilder().add(value);
      return this;
    }

    public abstract Builder setEcCurve(EcCurve ecCurve);

    public abstract Builder setRsaPublicExponent(Long rsaPublicExponent);

    public abstract Builder setRollbackResistance(boolean rollbackResistance);

    public abstract Builder setActiveDateTime(Instant activeDateTime);

    public abstract Builder setOriginationExpireDateTime(Instant originationExpireDateTime);

    public abstract Builder setUsageExpireDateTime(Instant usageExpireDateTime);

    public abstract Builder setNoAuthRequired(boolean noAuthRequired);

    abstract ImmutableSet.Builder<UserAuthType> userAuthTypeBuilder();

    @CanIgnoreReturnValue
    public final Builder addUserAuthType(UserAuthType value) {
      userAuthTypeBuilder().add(value);
      return this;
    }

    public abstract Builder setAuthTimeout(Duration authTimeout);

    public abstract Builder setAllowWhileOnBody(boolean allowWhileOnBody);

    public abstract Builder setTrustedUserPresenceRequired(boolean trustedUserPresenceRequired);

    public abstract Builder setTrustedConfirmationRequired(boolean trustedConfirmationRequired);

    public abstract Builder setUnlockedDeviceRequired(boolean unlockedDeviceRequired);

    public abstract Builder setCreationDateTime(Instant creationDateTime);

    public abstract Builder setOrigin(KeyOrigin origin);

    public abstract Builder setRootOfTrust(RootOfTrust rootOfTrust);

    public abstract Builder setOsVersion(Integer osVersion);

    public abstract Builder setOsPatchLevel(YearMonth osPatchLevel);

    public abstract Builder setAttestationApplicationId(
        AttestationApplicationId attestationApplicationId);

    // For each attestation-ID field a ByteString setter (AutoValue) is paired with a
    // String convenience overload that UTF-8-encodes the value.

    public abstract Builder setAttestationIdBrand(ByteString attestationIdBrand);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdBrand(String value) {
      return setAttestationIdBrand(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdDevice(ByteString attestationIdDevice);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdDevice(String value) {
      return setAttestationIdDevice(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdProduct(ByteString attestationIdProduct);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdProduct(String value) {
      return setAttestationIdProduct(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdSerial(ByteString attestationIdSerial);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdSerial(String value) {
      return setAttestationIdSerial(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdImei(ByteString attestationIdImei);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdImei(String value) {
      return setAttestationIdImei(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdSecondImei(ByteString attestationIdSecondImei);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdSecondImei(String value) {
      return setAttestationIdSecondImei(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdMeid(ByteString attestationIdMeid);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdMeid(String value) {
      return setAttestationIdMeid(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdManufacturer(ByteString attestationIdManufacturer);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdManufacturer(String value) {
      return setAttestationIdManufacturer(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setAttestationIdModel(ByteString attestationIdModel);

    @CanIgnoreReturnValue
    public final Builder setAttestationIdModel(String value) {
      return setAttestationIdModel(ByteString.copyFromUtf8(value));
    }

    public abstract Builder setVendorPatchLevel(LocalDate vendorPatchLevel);

    public abstract Builder setBootPatchLevel(LocalDate bootPatchLevel);

    public abstract Builder setIndividualAttestation(boolean individualAttestation);

    abstract ImmutableList.Builder<Integer> unorderedTagsBuilder();

    @CanIgnoreReturnValue
    public final Builder addUnorderedTag(Integer value) {
      unorderedTagsBuilder().add(value);
      return this;
    }

    public abstract AuthorizationList build();
  }

  /**
   * Parses the ASN.1 AuthorizationList sequence elements of an attestation extension into an
   * {@link AuthorizationList}. Unknown tags are ignored; tags appearing out of ascending order
   * are recorded in {@link #unorderedTags()}.
   */
  static AuthorizationList createAuthorizationList(
      ASN1Encodable[] authorizationList, int attestationVersion) {
    Builder builder = AuthorizationList.builder();
    ParsedAuthorizationMap parsedAuthorizationMap = new ParsedAuthorizationMap(authorizationList);
    parsedAuthorizationMap.findIntegerSetAuthorizationListEntry(KM_TAG_PURPOSE).stream()
        .map(ASN1_TO_OPERATION_PURPOSE::get)
        .forEach(builder::addPurpose);
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_ALGORITHM)
        .map(ASN1_TO_ALGORITHM::get)
        .ifPresent(builder::setAlgorithm);
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_KEY_SIZE)
        .ifPresent(builder::setKeySize);
    parsedAuthorizationMap.findIntegerSetAuthorizationListEntry(KM_TAG_DIGEST).stream()
        .map(ASN1_TO_DIGEST_MODE::get)
        .forEach(builder::addDigest);
    parsedAuthorizationMap.findIntegerSetAuthorizationListEntry(KM_TAG_PADDING).stream()
        .map(ASN1_TO_PADDING_MODE::get)
        .forEach(builder::addPadding);
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_EC_CURVE)
        .map(ASN1_TO_EC_CURVE::get)
        .ifPresent(builder::setEcCurve);
    parsedAuthorizationMap
        .findOptionalLongAuthorizationListEntry(KM_TAG_RSA_PUBLIC_EXPONENT)
        .ifPresent(builder::setRsaPublicExponent);
    // Boolean tags are encoded by mere presence (NULL value); absence means false.
    builder.setRollbackResistance(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(KM_TAG_ROLLBACK_RESISTANCE));
    parsedAuthorizationMap
        .findOptionalInstantMillisAuthorizationListEntry(KM_TAG_ACTIVE_DATE_TIME)
        .ifPresent(builder::setActiveDateTime);
    parsedAuthorizationMap
        .findOptionalInstantMillisAuthorizationListEntry(KM_TAG_ORIGINATION_EXPIRE_DATE_TIME)
        .ifPresent(builder::setOriginationExpireDateTime);
    parsedAuthorizationMap
        .findOptionalInstantMillisAuthorizationListEntry(KM_TAG_USAGE_EXPIRE_DATE_TIME)
        .ifPresent(builder::setUsageExpireDateTime);
    builder.setNoAuthRequired(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(KM_TAG_NO_AUTH_REQUIRED));
    parsedAuthorizationMap
        .findOptionalLongAuthorizationListEntry(KM_TAG_USER_AUTH_TYPE)
        .map(AuthorizationList::userAuthTypeToEnum)
        .ifPresent(it -> it.forEach(builder::addUserAuthType))
;
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_AUTH_TIMEOUT)
        .map(Duration::ofSeconds)
        .ifPresent(builder::setAuthTimeout);
    builder.setAllowWhileOnBody(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(KM_TAG_ALLOW_WHILE_ON_BODY));
    builder.setTrustedUserPresenceRequired(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(
            KM_TAG_TRUSTED_USER_PRESENCE_REQUIRED));
    builder.setTrustedConfirmationRequired(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(
            KM_TAG_TRUSTED_CONFIRMATION_REQUIRED));
    builder.setUnlockedDeviceRequired(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(KM_TAG_UNLOCKED_DEVICE_REQUIRED));
    parsedAuthorizationMap
        .findOptionalInstantMillisAuthorizationListEntry(KM_TAG_CREATION_DATE_TIME)
        .ifPresent(builder::setCreationDateTime);
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_ORIGIN)
        .map(ASN1_TO_KEY_ORIGIN::get)
        .ifPresent(builder::setOrigin);
    // Root of trust is a nested SEQUENCE; its layout depends on the attestation version.
    parsedAuthorizationMap
        .findAuthorizationListEntry(KM_TAG_ROOT_OF_TRUST)
        .map(ASN1Sequence.class::cast)
        .map(rootOfTrust -> RootOfTrust.createRootOfTrust(rootOfTrust, attestationVersion))
        .ifPresent(builder::setRootOfTrust);
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_OS_VERSION)
        .ifPresent(builder::setOsVersion);
    // OS patch level is encoded as the integer yyyymm.
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_OS_PATCH_LEVEL)
        .map(String::valueOf)
        .map(AuthorizationList::toYearMonth)
        .ifPresent(builder::setOsPatchLevel);
    // The attestation application ID is a DER-encoded structure inside an OCTET STRING.
    parsedAuthorizationMap
        .findAuthorizationListEntry(KM_TAG_ATTESTATION_APPLICATION_ID)
        .map(ASN1OctetString.class::cast)
        .map(ASN1OctetString::getOctets)
        .map(AttestationApplicationId::createAttestationApplicationId)
        .ifPresent(builder::setAttestationApplicationId);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_BRAND)
        .ifPresent(builder::setAttestationIdBrand);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_DEVICE)
        .ifPresent(builder::setAttestationIdDevice);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_PRODUCT)
        .ifPresent(builder::setAttestationIdProduct);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_SERIAL)
        .ifPresent(builder::setAttestationIdSerial);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_IMEI)
        .ifPresent(builder::setAttestationIdImei);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_SECOND_IMEI)
        .ifPresent(builder::setAttestationIdSecondImei);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_MEID)
        .ifPresent(builder::setAttestationIdMeid);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_MANUFACTURER)
        .ifPresent(builder::setAttestationIdManufacturer);
    parsedAuthorizationMap
        .findOptionalByteArrayAuthorizationListEntry(KM_TAG_ATTESTATION_ID_MODEL)
        .ifPresent(builder::setAttestationIdModel);
    // Vendor/boot patch levels are encoded as the integer yyyymmdd (or yyyymm; see toLocalDate).
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_VENDOR_PATCH_LEVEL)
        .map(String::valueOf)
        .map(AuthorizationList::toLocalDate)
        .ifPresent(builder::setVendorPatchLevel);
    parsedAuthorizationMap
        .findOptionalIntegerAuthorizationListEntry(KM_TAG_BOOT_PATCH_LEVEL)
        .map(String::valueOf)
        .map(AuthorizationList::toLocalDate)
        .ifPresent(builder::setBootPatchLevel);
    builder.setIndividualAttestation(
        parsedAuthorizationMap.findBooleanAuthorizationListEntry(KM_TAG_DEVICE_UNIQUE_ATTESTATION));
    parsedAuthorizationMap.getUnorderedTags().forEach(builder::addUnorderedTag);
    return builder.build();
  }

  /**
   * Converts a patch-level string of the form yyyymm or yyyymmdd to a LocalDate, substituting 1
   * for a zero month or day component.
   *
   * @throws IllegalArgumentException if the input is not 6 or 8 characters long
   */
  @VisibleForTesting
  static LocalDate toLocalDate(String value) {
    checkArgument(value.length() == 6 || value.length() == 8);
    int year = Integer.parseInt(value.substring(0, 4));
    int month =
        Integer.parseInt(value.substring(4, 6)) == 0 ? 1 : Integer.parseInt(value.substring(4, 6));
    int day =
        value.length() == 8 && !value.substring(6, 8).equals("00")
            ? Integer.parseInt(value.substring(6, 8))
            : 1;
    return LocalDate.of(year, month, day);
  }

  // Parses a 6-character yyyymm string; rethrows parse failures as IllegalArgumentException.
  private static YearMonth toYearMonth(String value) {
    checkArgument(value.length() == 6);
    try {
      return YearMonth.parse(value, DateTimeFormatter.ofPattern("yyyyMM"));
    } catch (DateTimeParseException e) {
      throw new IllegalArgumentException(e);
    }
  }

  // Inverse of toLocalDate for re-encoding: yyyyMMdd.
  private static String toString(LocalDate date) {
    return date.format(DateTimeFormatter.ofPattern("yyyyMMdd"));
  }

  // Inverse of toYearMonth for re-encoding: yyyyMM.
  private static String toString(YearMonth date) {
    return date.format(DateTimeFormatter.ofPattern("yyyyMM"));
  }

  /**
   * Decodes the KM_TAG_USER_AUTH_TYPE bitmask: 0 means none, bit 0 password, bit 1 fingerprint,
   * and the all-ones 32-bit value means any authenticator.
   *
   * @throws IllegalArgumentException if no known bit is set in a non-zero mask
   */
  @VisibleForTesting
  static ImmutableSet<UserAuthType> userAuthTypeToEnum(long userAuthType) {
    if (userAuthType == 0) {
      return ImmutableSet.of(USER_AUTH_TYPE_NONE);
    }
    ImmutableSet.Builder<UserAuthType> builder = ImmutableSet.builder();
    if ((userAuthType & 1L) == 1L) {
      builder.add(PASSWORD);
    }
    if ((userAuthType & 2L) == 2L) {
      builder.add(FINGERPRINT);
    }
    if (userAuthType == UINT32_MAX) {
      builder.add(USER_AUTH_TYPE_ANY);
    }
    ImmutableSet<UserAuthType> result = builder.build();
    if (result.isEmpty()) {
      throw new IllegalArgumentException("Invalid User Auth Type.");
    }
    return result;
  }

  // Inverse of userAuthTypeToEnum: folds the enum set back into the wire bitmask.
  private static Long userAuthTypeToLong(Set<UserAuthType> userAuthType) {
    if (userAuthType.contains(USER_AUTH_TYPE_NONE)) {
      return 0L;
    }
    long result = 0L;
    for (UserAuthType type : userAuthType) {
      switch (type) {
        case PASSWORD:
          result |= 1L;
          break;
        case FINGERPRINT:
          result |= 2L;
          break;
        case USER_AUTH_TYPE_ANY:
          result |= UINT32_MAX;
          break;
        default:
          break;
      }
    }
    if (result == 0) {
      throw new IllegalArgumentException("Invalid User Auth Type.");
    }
    return result;
  }

  /**
   * Re-encodes this authorization list as an ASN.1 SEQUENCE of context-tagged values, emitting
   * tags in the fixed order below (ascending tag number). Absent optionals and false booleans
   * produce no element.
   */
  public ASN1Sequence toAsn1Sequence() {
    ASN1EncodableVector vector = new ASN1EncodableVector();
    addOptionalIntegerSet(
        KM_TAG_PURPOSE,
        this.purpose().stream()
            .flatMap(key -> Stream.ofNullable(OPERATION_PURPOSE_TO_ASN1.get(key)))
            .collect(toImmutableSet()),
        vector);
    addOptionalInteger(KM_TAG_ALGORITHM, this.algorithm().map(ALGORITHM_TO_ASN1::get), vector);
    addOptionalInteger(KM_TAG_KEY_SIZE, this.keySize(), vector);
    addOptionalIntegerSet(
        KM_TAG_DIGEST,
        this.digest().stream()
            .flatMap(key -> Stream.ofNullable(DIGEST_MODE_TO_ASN1.get(key)))
            .collect(toImmutableSet()),
        vector);
    addOptionalIntegerSet(
        KM_TAG_PADDING,
        this.padding().stream()
            .flatMap(key -> Stream.ofNullable(PADDING_MODE_TO_ASN1.get(key)))
            .collect(toImmutableSet()),
        vector);
    addOptionalInteger(KM_TAG_EC_CURVE, this.ecCurve().map(EC_CURVE_TO_ASN1::get), vector);
    addOptionalLong(KM_TAG_RSA_PUBLIC_EXPONENT, this.rsaPublicExponent(), vector);
    addBoolean(KM_TAG_ROLLBACK_RESISTANCE, this.rollbackResistance(), vector);
    addOptionalInstant(KM_TAG_ACTIVE_DATE_TIME, this.activeDateTime(), vector);
    addOptionalInstant(
        KM_TAG_ORIGINATION_EXPIRE_DATE_TIME, this.originationExpireDateTime(), vector);
    addOptionalInstant(KM_TAG_USAGE_EXPIRE_DATE_TIME, this.usageExpireDateTime(), vector);
    addBoolean(KM_TAG_NO_AUTH_REQUIRED, this.noAuthRequired(), vector);
    addOptionalUserAuthType(KM_TAG_USER_AUTH_TYPE, this.userAuthType(), vector);
    addOptionalDuration(KM_TAG_AUTH_TIMEOUT, this.authTimeout(), vector);
    addBoolean(KM_TAG_ALLOW_WHILE_ON_BODY, this.allowWhileOnBody(), vector);
    addBoolean(KM_TAG_TRUSTED_USER_PRESENCE_REQUIRED, this.trustedUserPresenceRequired(), vector);
    addBoolean(KM_TAG_TRUSTED_CONFIRMATION_REQUIRED, this.trustedConfirmationRequired(), vector);
    addBoolean(KM_TAG_UNLOCKED_DEVICE_REQUIRED, this.unlockedDeviceRequired(), vector);
    addOptionalInstant(KM_TAG_CREATION_DATE_TIME, this.creationDateTime(), vector);
    addOptionalInteger(KM_TAG_ORIGIN, this.origin().map(KEY_ORIGIN_TO_ASN1::get), vector);
    addOptionalRootOfTrust(KM_TAG_ROOT_OF_TRUST, this.rootOfTrust(), vector);
    addOptionalInteger(KM_TAG_OS_VERSION, this.osVersion(), vector);
    addOptionalInteger(
        KM_TAG_OS_PATCH_LEVEL,
        this.osPatchLevel().map(AuthorizationList::toString).map(Integer::valueOf),
        vector);
    this.attestationApplicationId()
        .map(AttestationApplicationId::getEncoded)
        .map(DEROctetString::new)
        .map(obj -> new DERTaggedObject(KM_TAG_ATTESTATION_APPLICATION_ID, obj))
        .ifPresent(vector::add);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_BRAND, this.attestationIdBrand(), vector);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_DEVICE, this.attestationIdDevice(), vector);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_PRODUCT, this.attestationIdProduct(), vector);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_SERIAL, this.attestationIdSerial(), vector);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_IMEI, this.attestationIdImei(), vector);
    addOptionalOctetString(
        KM_TAG_ATTESTATION_ID_SECOND_IMEI, this.attestationIdSecondImei(), vector);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_MEID, this.attestationIdMeid(), vector);
    addOptionalOctetString(
        KM_TAG_ATTESTATION_ID_MANUFACTURER, this.attestationIdManufacturer(), vector);
    addOptionalOctetString(KM_TAG_ATTESTATION_ID_MODEL, this.attestationIdModel(), vector);
    addOptionalInteger(
        KM_TAG_VENDOR_PATCH_LEVEL,
        this.vendorPatchLevel().map(AuthorizationList::toString).map(Integer::valueOf),
        vector);
    addOptionalInteger(
        KM_TAG_BOOT_PATCH_LEVEL,
        this.bootPatchLevel().map(AuthorizationList::toString).map(Integer::valueOf),
        vector);
    addBoolean(KM_TAG_DEVICE_UNIQUE_ATTESTATION, this.individualAttestation(), vector);
    return new DERSequence(vector);
  }

  // Emits tag -> SET OF INTEGER, omitted entirely when the set is empty.
  private static void addOptionalIntegerSet(
      int tag, Set<Integer> entry, ASN1EncodableVector vector) {
    if (!entry.isEmpty()) {
      ASN1EncodableVector tmp = new ASN1EncodableVector();
      entry.forEach((Integer value) -> tmp.add(new ASN1Integer(value.longValue())));
      vector.add(new DERTaggedObject(tag, new DERSet(tmp)));
    }
  }

  // Emits tag -> INTEGER (epoch milliseconds) when present.
  private static void addOptionalInstant(
      int tag, Optional<Instant> entry, ASN1EncodableVector vector) {
    if (entry.isPresent()) {
      vector.add(new DERTaggedObject(tag, new ASN1Integer(entry.get().toEpochMilli())));
    }
  }

  // Emits tag -> INTEGER (whole seconds) when present.
  private static void addOptionalDuration(
      int tag, Optional<Duration> entry, ASN1EncodableVector vector) {
    if (entry.isPresent()) {
      vector.add(new DERTaggedObject(tag, new ASN1Integer(entry.get().getSeconds())));
    }
  }

  // Boolean tags encode "true" as a tagged NULL; "false" is encoded by absence.
  private static void addBoolean(int tag, boolean entry, ASN1EncodableVector vector) {
    if (entry) {
      vector.add(new DERTaggedObject(tag, DERNull.INSTANCE));
    }
  }

  // Emits tag -> INTEGER when present.
  private static void addOptionalInteger(
      int tag, Optional<Integer> entry, ASN1EncodableVector vector) {
    if (entry.isPresent()) {
      vector.add(new DERTaggedObject(tag, new ASN1Integer(entry.get())));
    }
  }

  // Emits tag -> INTEGER (long-valued) when present.
  private static void addOptionalLong(int tag, Optional<Long> entry, ASN1EncodableVector vector) {
    if (entry.isPresent()) {
      vector.add(new DERTaggedObject(tag, new ASN1Integer(entry.get())));
    }
  }

  // Emits tag -> OCTET STRING when present.
  private static void addOptionalOctetString(
      int tag, Optional<ByteString> entry, ASN1EncodableVector vector) {
    if (entry.isPresent()) {
      vector.add(new DERTaggedObject(tag, new DEROctetString(entry.get().toByteArray())));
    }
  }

  // Emits tag -> INTEGER bitmask built by userAuthTypeToLong, omitted when the set is empty.
  private static void addOptionalUserAuthType(
      int tag, Set<UserAuthType> entry, ASN1EncodableVector vector) {
    if (!entry.isEmpty()) {
      vector.add(new DERTaggedObject(tag, new ASN1Integer(userAuthTypeToLong(entry))));
    }
  }

  // Emits tag -> nested RootOfTrust SEQUENCE when present.
  private static void addOptionalRootOfTrust(
      int tag, Optional<RootOfTrust> entry, ASN1EncodableVector vector) {
    if (entry.isPresent()) {
      vector.add(new DERTaggedObject(tag, entry.get().toAsn1Sequence()));
    }
  }

  /**
   * This data structure holds the parsed attest record authorizations mapped to their authorization
   * tags and a list of unordered authorization tags found in this authorization list.
   */
  private static class ParsedAuthorizationMap {
    // Tag number -> explicit-context base object(s); a multimap because a tag may repeat.
    private final ImmutableListMultimap<Integer, ASN1Object> authorizationMap;
    // Tags seen out of ascending order in the wire encoding (insertion order of keySet()).
    private final ImmutableList<Integer> unorderedTags;

    private ParsedAuthorizationMap(ASN1Encodable[] authorizationList) {
      this.authorizationMap =
          stream(authorizationList)
              .map(ASN1TaggedObject::getInstance)
              .collect(
                  ImmutableListMultimap.toImmutableListMultimap(
                      ASN1TaggedObject::getTagNo,
                      obj -> ASN1Util.getExplicitContextBaseObject(obj, obj.getTagNo())));
      // Record every tag that appears after a larger tag (encoding not in ascending order).
      ImmutableList.Builder<Integer> unorderedTags = ImmutableList.builder();
      int previousTag = 0;
      for (int currentTag : authorizationMap.keySet()) {
        if (previousTag > currentTag) {
          unorderedTags.add(previousTag);
        }
        previousTag = currentTag;
      }
      this.unorderedTags = unorderedTags.build();
    }

    private ImmutableList<Integer> getUnorderedTags() {
      return unorderedTags;
    }

    // Returns the single entry for a tag; throws if the tag appears more than once.
    private Optional<ASN1Object> findAuthorizationListEntry(int tag) {
      ImmutableList<ASN1Object> entries = authorizationMap.get(tag);
      if (entries.isEmpty()) {
        return Optional.empty();
      } else if (entries.size() == 1) {
        return Optional.of(entries.get(0));
      } else {
        throw new IllegalStateException(
            String.format("Multiple authorization list entries for tag %d", tag));
      }
    }

    private Optional<Integer> findOptionalIntegerAuthorizationListEntry(int tag) {
      return findAuthorizationListEntry(tag)
          .map(ASN1Integer.class::cast)
          .map(ASN1Parsing::getIntegerFromAsn1);
    }

    private ImmutableSet<Integer> findIntegerSetAuthorizationListEntry(int tag) {
      // if there are duplicate ASN1Set(s) for given tag, merge them into one set
      return authorizationMap.get(tag).stream()
          .map(ASN1Set.class::cast)
          .flatMap(Streams::stream)
          .map(ASN1Parsing::getIntegerFromAsn1)
          .collect(toImmutableSet());
    }

    private Optional<Instant> findOptionalInstantMillisAuthorizationListEntry(int tag) {
      Optional<Long> millis = findOptionalLongAuthorizationListEntry(tag);
      return millis.map(Instant::ofEpochMilli);
    }

    private Optional<Long> findOptionalLongAuthorizationListEntry(int tag) {
      return findAuthorizationListEntry(tag)
          .map(ASN1Integer.class::cast)
          .map(value -> value.getValue().longValue());
    }

    // Boolean tags: presence of the tag means true.
    private boolean findBooleanAuthorizationListEntry(int tag) {
      return findAuthorizationListEntry(tag).isPresent();
    }

    private Optional<ByteString> findOptionalByteArrayAuthorizationListEntry(int tag) {
      return findAuthorizationListEntry(tag)
          .map(ASN1OctetString.class::cast)
          .map(ASN1OctetString::getOctets)
          .map(ByteString::copyFrom);
    }
  }
}
googleapis/google-cloud-java
37,764
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListAnalysesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/lva_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Message for requesting list of Analyses * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListAnalysesRequest} */ public final class ListAnalysesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListAnalysesRequest) ListAnalysesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListAnalysesRequest.newBuilder() to construct. 
private ListAnalysesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListAnalysesRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; orderBy_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListAnalysesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ListAnalysesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ListAnalysesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListAnalysesRequest.class, com.google.cloud.visionai.v1.ListAnalysesRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ORDER_BY_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object orderBy_ = ""; /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @return The orderBy. 
*/ @java.lang.Override public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } } /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @return The bytes for orderBy. */ @java.lang.Override public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListAnalysesRequest)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListAnalysesRequest other = (com.google.cloud.visionai.v1.ListAnalysesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getOrderBy().equals(other.getOrderBy())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + 
getFilter().hashCode(); hash = (37 * hash) + ORDER_BY_FIELD_NUMBER; hash = (53 * hash) + getOrderBy().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAnalysesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.ListAnalysesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for requesting list of Analyses * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListAnalysesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListAnalysesRequest) com.google.cloud.visionai.v1.ListAnalysesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ListAnalysesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ListAnalysesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListAnalysesRequest.class, com.google.cloud.visionai.v1.ListAnalysesRequest.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListAnalysesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; orderBy_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.LvaServiceProto .internal_static_google_cloud_visionai_v1_ListAnalysesRequest_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ListAnalysesRequest getDefaultInstanceForType() { return 
com.google.cloud.visionai.v1.ListAnalysesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListAnalysesRequest build() { com.google.cloud.visionai.v1.ListAnalysesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ListAnalysesRequest buildPartial() { com.google.cloud.visionai.v1.ListAnalysesRequest result = new com.google.cloud.visionai.v1.ListAnalysesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.visionai.v1.ListAnalysesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.orderBy_ = orderBy_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, 
value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ListAnalysesRequest) { return mergeFrom((com.google.cloud.visionai.v1.ListAnalysesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.ListAnalysesRequest other) { if (other == com.google.cloud.visionai.v1.ListAnalysesRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { orderBy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000010; break; } // case 42 default: { if 
(!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Parent value for ListAnalysesRequest * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Requested page size. Server may return fewer items than requested. * If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. 
* </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Filtering results * </pre> * * <code>string filter = 4;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object orderBy_ = ""; /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @return The orderBy. */ public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @return The bytes for orderBy. */ public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @param value The orderBy to set. * @return This builder for chaining. */ public Builder setOrderBy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } orderBy_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @return This builder for chaining. */ public Builder clearOrderBy() { orderBy_ = getDefaultInstance().getOrderBy(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * * * <pre> * Hint for how to order the results * </pre> * * <code>string order_by = 5;</code> * * @param value The bytes for orderBy to set. * @return This builder for chaining. 
*/ public Builder setOrderByBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); orderBy_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListAnalysesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListAnalysesRequest) private static final com.google.cloud.visionai.v1.ListAnalysesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListAnalysesRequest(); } public static com.google.cloud.visionai.v1.ListAnalysesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListAnalysesRequest> PARSER = new com.google.protobuf.AbstractParser<ListAnalysesRequest>() { @java.lang.Override public ListAnalysesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListAnalysesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListAnalysesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ListAnalysesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/tika
37,840
tika-parsers/tika-parsers-standard/tika-parsers-standard-modules/tika-parser-pdf-module/src/main/java/org/apache/tika/parser/pdf/PDFParserConfig.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tika.parser.pdf; import java.io.Serializable; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.HashSet; import java.util.Locale; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.pdfbox.rendering.ImageType; import org.apache.pdfbox.text.PDFTextStripper; import org.apache.tika.exception.TikaException; import org.apache.tika.parser.pdf.image.ImageGraphicsEngineFactory; import org.apache.tika.renderer.Renderer; /** * Config for PDFParser. * <p/> * This allows parameters to be set programmatically: * <ol> * <li>Calls to PDFParser, i.e. 
parser.getPDFParserConfig().setEnableAutoSpace() (as before)</li> * <li>Passing to PDFParser through a ParseContext: context.set(PDFParserConfig.class, config);</li> * </ol> * <p/> */ public class PDFParserConfig implements Serializable { public enum TikaImageType { RGB(ImageType.RGB), GRAY(ImageType.GRAY); private ImageType imageType; TikaImageType(ImageType imageType) { this.imageType = imageType; } public ImageType getImageType() { return imageType; } } private static final long serialVersionUID = 6492570218190936986L; private final Set<String> userConfigured = new HashSet<>(); // True if we let PDFBox "guess" where spaces should go: private boolean enableAutoSpace = true; // True if we let PDFBox remove duplicate overlapping text: private boolean suppressDuplicateOverlappingText = false; // True if we let PDFBox ignore spaces in the content stream and rely purely on the algorithm: private boolean ignoreContentStreamSpaceGlyphs = false; // True if we extract annotation text ourselves // (workaround for PDFBOX-1143): private boolean extractAnnotationText = true; // True if we should sort text tokens by position // (necessary for some PDFs, but messes up other PDFs): private boolean sortByPosition = false; //True if acroform content should be extracted private boolean extractAcroFormContent = true; //True if bookmarks content should be extracted private boolean extractBookmarksText = true; //True if inline PDXImage objects should be extracted private boolean extractInlineImages = false; //True if inline images should only have their metadata //extracted. private boolean extractInlineImageMetadataOnly = false; private ImageGraphicsEngineFactory imageGraphicsEngineFactory = new ImageGraphicsEngineFactory(); //True if inline images (as identified by their object id within //a pdf file) should only be extracted once. 
private boolean extractUniqueInlineImagesOnly = true; //Should the PDFParser _try_ to extract marked content/structure tags (backoff to regular //text extraction if the given PDF doesn't have marked content) private boolean extractMarkedContent = false; //The character width-based tolerance value used to estimate where spaces in text should be // added. Default taken from PDFBox. private Float averageCharTolerance = 0.3f; //The space width-based tolerance value used to estimate where spaces in text should be added //Default taken from PDFBox. private Float spacingTolerance = 0.5f; // The multiplication factor for line height to decide when a new paragraph starts. //Default taken from PDFBox. private float dropThreshold = 2.5f; //If the PDF has an XFA element, process only that and skip extracting //content from elsewhere in the document. private boolean ifXFAExtractOnlyXFA = false; private OCR_STRATEGY ocrStrategy = OCR_STRATEGY.AUTO; // If OCR_Strategy=AUTO, then this controls the algorithm used private static final OCRStrategyAuto OCR_STRATEGY_AUTO_BETTER = new OCRStrategyAuto(10, 10); private static final OCRStrategyAuto OCR_STRATEGY_AUTO_FASTER = new OCRStrategyAuto(.1f, 10); private static final int OCR_STRATEGY_AUTO_DEFAULT_CHARS_PER_PAGE = 10; private OCRStrategyAuto ocrStrategyAuto = OCR_STRATEGY_AUTO_BETTER; private OCR_RENDERING_STRATEGY ocrRenderingStrategy = OCR_RENDERING_STRATEGY.ALL; private int ocrDPI = 300; private TikaImageType ocrImageType = TikaImageType.GRAY; private String ocrImageFormatName = "png"; private float ocrImageQuality = 1.0f; /** * Should the entire document be rendered? */ private IMAGE_STRATEGY imageStrategy = IMAGE_STRATEGY.NONE; private AccessChecker accessChecker = new AccessChecker(); //The PDFParser can throw IOExceptions if there is a problem //with a streams. 
If this is set to true, Tika's //parser catches these exceptions, reports them in the metadata //and then throws the first stored exception after the parse has completed. private boolean catchIntermediateIOExceptions = true; private boolean extractActions = false; private boolean extractFontNames = false; private long maxMainMemoryBytes = 512 * 1024 * 1024; private boolean setKCMS = false; private boolean detectAngles = false; private Renderer renderer; private boolean extractIncrementalUpdateInfo = true; private boolean parseIncrementalUpdates = false; int maxIncrementalUpdates = 10; private boolean throwOnEncryptedPayload = false; /** * @return whether or not to extract only inline image metadata and not render the images */ public boolean isExtractInlineImageMetadataOnly() { return extractInlineImageMetadataOnly; } /** * Use this when you want to know how many images of what formats are in a PDF * but you don't need to render the images (e.g. for OCR). This is far * faster than {@link #extractInlineImages} because it doesn't have to render the * images, which can be very slow. This does not extract metadata from * within each image, rather it extracts the XMP that may be stored * external to an image in PDImageXObjects. * * @param extractInlineImageMetadataOnly * @since 1.25 */ public void setExtractInlineImageMetadataOnly(boolean extractInlineImageMetadataOnly) { this.extractInlineImageMetadataOnly = extractInlineImageMetadataOnly; userConfigured.add("extractInlineImageMetadataOnly"); } public boolean isExtractMarkedContent() { return extractMarkedContent; } /** * If the PDF contains marked content, try to extract text and its marked structure. * If the PDF does not contain marked content, backoff to the regular PDF2XHTML for * text extraction. As of 1.24, this is an "alpha" version. 
* * @param extractMarkedContent * @since 1.24 */ public void setExtractMarkedContent(boolean extractMarkedContent) { this.extractMarkedContent = extractMarkedContent; userConfigured.add("extractMarkedContent"); } /** * Configures the given pdf2XHTML. * * @param pdf2XHTML */ public void configure(PDF2XHTML pdf2XHTML) { pdf2XHTML.setSortByPosition(isSortByPosition()); if (isEnableAutoSpace()) { pdf2XHTML.setWordSeparator(" "); } else { pdf2XHTML.setWordSeparator(""); } if (getAverageCharTolerance() != null) { pdf2XHTML.setAverageCharTolerance(getAverageCharTolerance()); } if (getSpacingTolerance() != null) { pdf2XHTML.setSpacingTolerance(getSpacingTolerance()); } if (getDropThreshold() != null) { pdf2XHTML.setDropThreshold(dropThreshold); } pdf2XHTML.setSuppressDuplicateOverlappingText(isSuppressDuplicateOverlappingText()); pdf2XHTML.setIgnoreContentStreamSpaceGlyphs(isIgnoreContentStreamSpaceGlyphs()); } /** * @see #setExtractAcroFormContent(boolean) */ public boolean isExtractAcroFormContent() { return extractAcroFormContent; } /** * If true (the default), extract content from AcroForms * at the end of the document. If an XFA is found, * try to process that, otherwise, process the AcroForm. * * @param extractAcroFormContent */ public void setExtractAcroFormContent(boolean extractAcroFormContent) { this.extractAcroFormContent = extractAcroFormContent; userConfigured.add("extractAcroFormContent"); } /** * @return how to handle XFA data if it exists * @see #setIfXFAExtractOnlyXFA(boolean) */ public boolean isIfXFAExtractOnlyXFA() { return ifXFAExtractOnlyXFA; } /** * If false (the default), extract content from the full PDF * as well as the XFA form. This will likely lead to some duplicative * content. 
* * @param ifXFAExtractOnlyXFA */ public void setIfXFAExtractOnlyXFA(boolean ifXFAExtractOnlyXFA) { this.ifXFAExtractOnlyXFA = ifXFAExtractOnlyXFA; userConfigured.add("ifXFAExtractOnlyXFA"); } /** * @see #setExtractBookmarksText(boolean) */ public boolean isExtractBookmarksText() { return extractBookmarksText; } /** * If true, extract bookmarks (document outline) text. * <p/> * Te default is <code>true</code> * * @param extractBookmarksText */ public void setExtractBookmarksText(boolean extractBookmarksText) { this.extractBookmarksText = extractBookmarksText; userConfigured.add("extractBookmarksText"); } public boolean isExtractFontNames() { return extractFontNames; } /** * Extract font names into a metadata field * * @param extractFontNames */ public void setExtractFontNames(boolean extractFontNames) { this.extractFontNames = extractFontNames; userConfigured.add("extractFontNames"); } /** * @see #setExtractInlineImages(boolean) */ public boolean isExtractInlineImages() { return extractInlineImages; } /** * If <code>true</code>, extract the literal inline embedded OBXImages. * <p/> * <b>Beware:</b> some PDF documents of modest size (~4MB) can contain * thousands of embedded images totaling &gt; 2.5 GB. Also, at least as of PDFBox 1.8.5, * there can be surprisingly large memory consumption and/or out of memory errors. * <p/> * Along the same lines, note that this does not extract "logical" images. Some PDF writers * break up a single logical image into hundreds of little images. With this option set to * <code>true</code>, you might get those hundreds of little images. * <p/> * NOTE ALSO: this extracts the raw images without clipping, rotation, masks, color * inversion, etc. The images that this extracts may look nothing like what a human * would expect given the appearance of the PDF. * <p/> * Set to <code>true</code> only with the greatest caution. * * The default is <code>false</code>. 
* <p/> * * @param extractInlineImages * @see #setExtractUniqueInlineImagesOnly(boolean) */ public void setExtractInlineImages(boolean extractInlineImages) { this.extractInlineImages = extractInlineImages; userConfigured.add("extractInlineImages"); } /** * @see #setExtractUniqueInlineImagesOnly(boolean) */ public boolean isExtractUniqueInlineImagesOnly() { return extractUniqueInlineImagesOnly; } /** * Multiple pages within a PDF file might refer to the same underlying image. * If {@link #extractUniqueInlineImagesOnly} is set to <code>false</code>, the * parser will call the EmbeddedExtractor each time the image appears on a page. * This might be desired for some use cases. However, to avoid duplication of * extracted images, set this to <code>true</code>. The default is <code>true</code>. * <p/> * Note that uniqueness is determined only by the underlying PDF COSObject id, not by * file hash or similar equality metric. * If the PDF actually contains multiple copies of the same image * -- all with different object ids -- then all images will be extracted. * <p/> * For this parameter to have any effect, {@link #extractInlineImages} must be * set to <code>true</code>. * <p> * Because of TIKA-1742 -- to avoid infinite recursion -- no matter the setting * of this parameter, the extractor will only pull out one copy of each image per * page. This parameter tries to capture uniqueness across the entire document. * * @param extractUniqueInlineImagesOnly */ public void setExtractUniqueInlineImagesOnly(boolean extractUniqueInlineImagesOnly) { this.extractUniqueInlineImagesOnly = extractUniqueInlineImagesOnly; userConfigured.add("extractUniqueInlineImagesOnly"); } /** * @see #setEnableAutoSpace(boolean) */ public boolean isEnableAutoSpace() { return enableAutoSpace; } /** * If true (the default), the parser should estimate * where spaces should be inserted between words. For * many PDFs this is necessary as they do not include * explicit whitespace characters. 
*/ public void setEnableAutoSpace(boolean enableAutoSpace) { this.enableAutoSpace = enableAutoSpace; userConfigured.add("enableAutoSpace"); } /** * @see #setSuppressDuplicateOverlappingText(boolean) */ public boolean isSuppressDuplicateOverlappingText() { return suppressDuplicateOverlappingText; } /** * If true, the parser should try to remove duplicated * text over the same region. This is needed for some * PDFs that achieve bolding by re-writing the same * text in the same area. Note that this can * slow down extraction substantially (PDFBOX-956) and * sometimes remove characters that were not in fact * duplicated (PDFBOX-1155). By default this is disabled. */ public void setSuppressDuplicateOverlappingText(boolean suppressDuplicateOverlappingText) { this.suppressDuplicateOverlappingText = suppressDuplicateOverlappingText; userConfigured.add("suppressDuplicateOverlappingText"); } /** * @see #setIgnoreContentStreamSpaceGlyphs(boolean) */ public boolean isIgnoreContentStreamSpaceGlyphs() { return ignoreContentStreamSpaceGlyphs; } /** * If true, the parser should ignore spaces in the content stream and rely purely on the * algorithm to determine where word breaks are (PDFBOX-3774). This can improve text extraction * results where the content stream is sorted by position and has text overlapping spaces, but * could cause some word breaks to not be added to the output. By default this is disabled. */ public void setIgnoreContentStreamSpaceGlyphs(boolean ignoreContentStreamSpaceGlyphs) { this.ignoreContentStreamSpaceGlyphs = ignoreContentStreamSpaceGlyphs; userConfigured.add("ignoreContentStreamSpaceGlyphs"); } /** * @see #setExtractAnnotationText(boolean) */ public boolean isExtractAnnotationText() { return extractAnnotationText; } /** * If true (the default), text in annotations will be * extracted. 
*/ public void setExtractAnnotationText(boolean extractAnnotationText) { this.extractAnnotationText = extractAnnotationText; userConfigured.add("extractAnnotationText"); } /** * @see #setSortByPosition(boolean) */ public boolean isSortByPosition() { return sortByPosition; } /** * If true, sort text tokens by their x/y position * before extracting text. This may be necessary for * some PDFs (if the text tokens are not rendered "in * order"), while for other PDFs it can produce the * wrong result (for example if there are 2 columns, * the text will be interleaved). Default is false. */ public void setSortByPosition(boolean sortByPosition) { this.sortByPosition = sortByPosition; userConfigured.add("sortByPosition"); } /** * @see #setAverageCharTolerance(Float) */ public Float getAverageCharTolerance() { return averageCharTolerance; } /** * See {@link PDFTextStripper#setAverageCharTolerance(float)} */ public void setAverageCharTolerance(Float averageCharTolerance) { this.averageCharTolerance = averageCharTolerance; userConfigured.add("averageCharTolerance"); } /** * @see #setSpacingTolerance(Float) */ public Float getSpacingTolerance() { return spacingTolerance; } /** * See {@link PDFTextStripper#setSpacingTolerance(float)} */ public void setSpacingTolerance(Float spacingTolerance) { this.spacingTolerance = spacingTolerance; userConfigured.add("spacingTolerance"); } /** * @see #setDropThreshold(Float) */ public Float getDropThreshold() { return dropThreshold; } /** * See {@link PDFTextStripper#setDropThreshold(float)} */ public void setDropThreshold(Float dropThreshold) { this.dropThreshold = dropThreshold; userConfigured.add("dropThreshold"); } public AccessChecker getAccessChecker() { return accessChecker; } public void setAccessChecker(AccessChecker accessChecker) { this.accessChecker = accessChecker; userConfigured.add("accessChecker"); } /** * See {@link #setCatchIntermediateIOExceptions(boolean)} * * @return whether or not to catch IOExceptions */ public boolean 
isCatchIntermediateIOExceptions() { return catchIntermediateIOExceptions; } /** * The PDFBox parser will throw an IOException if there is * a problem with a stream. If this is set to <code>true</code>, * Tika's PDFParser will catch these exceptions and try to parse * the rest of the document. After the parse is completed, * Tika's PDFParser will throw the first caught exception. * * @param catchIntermediateIOExceptions */ public void setCatchIntermediateIOExceptions(boolean catchIntermediateIOExceptions) { this.catchIntermediateIOExceptions = catchIntermediateIOExceptions; userConfigured.add("catchIntermediateIOExceptions"); } /** * @return strategy to use for OCR */ public OCR_STRATEGY getOcrStrategy() { return ocrStrategy; } /** * @return ocr auto strategy to use when ocr_strategy = Auto */ public OCRStrategyAuto getOcrStrategyAuto() { return ocrStrategyAuto; } /** * Which strategy to use for OCR * * @param ocrStrategy */ public void setOcrStrategy(OCR_STRATEGY ocrStrategy) { this.ocrStrategy = ocrStrategy; userConfigured.add("ocrStrategy"); } public void setOcrStrategyAuto(String ocrStrategyAuto) { final String regex = "^\\s*(faster|better)|(\\d{1,3})(%)?(?:,\\s*(\\d{1,3}))?\\s*$"; Pattern pattern = Pattern.compile(regex); Matcher matcher = pattern.matcher(ocrStrategyAuto); if (matcher.matches()) { final String group1 = matcher.group(1); if ("better".equals(group1)) { this.ocrStrategyAuto = OCR_STRATEGY_AUTO_BETTER; } else if ("faster".equals(group1)) { this.ocrStrategyAuto = OCR_STRATEGY_AUTO_FASTER; } else { float unmappedUnicodeCharsPerPage = Integer.parseInt(matcher.group(2)); if (matcher.group(3) != null) { // If we have the percent sign, then convert if (unmappedUnicodeCharsPerPage > 100.0) { throw new IllegalArgumentException ("Error parsing OCRStrategyAuto - Percent cannot exceed 100%"); } unmappedUnicodeCharsPerPage = unmappedUnicodeCharsPerPage / 100f; } // The 2nd number is optional. 
Default to 10 chars per page int totalCharsPerPage = matcher.group(4) == null ? OCR_STRATEGY_AUTO_DEFAULT_CHARS_PER_PAGE : Integer.parseInt(matcher.group(4)); this.ocrStrategyAuto = new OCRStrategyAuto(unmappedUnicodeCharsPerPage, totalCharsPerPage); } userConfigured.add("ocrStrategyAuto"); } else { throw new IllegalArgumentException("Error parsing OCRStrategyAuto - Must be in the form 'num[%], num'"); } } /** * Which strategy to use for OCR * * @param ocrStrategyString */ public void setOcrStrategy(String ocrStrategyString) { setOcrStrategy(OCR_STRATEGY.parse(ocrStrategyString)); } public OCR_RENDERING_STRATEGY getOcrRenderingStrategy() { return ocrRenderingStrategy; } public void setOcrRenderingStrategy(String ocrRenderingStrategyString) { setOcrRenderingStrategy(OCR_RENDERING_STRATEGY.parse(ocrRenderingStrategyString)); } /** * When rendering the page for OCR, do you want to include the rendering of the electronic text, * ALL, or do you only want to run OCR on the images and vector graphics (NO_TEXT)? * * @param ocrRenderingStrategy */ public void setOcrRenderingStrategy(OCR_RENDERING_STRATEGY ocrRenderingStrategy) { this.ocrRenderingStrategy = ocrRenderingStrategy; userConfigured.add("ocrRenderingStrategy"); } /** * String representation of the image format used to render * the page image for OCR (examples: png, tiff, jpeg) * * @return */ public String getOcrImageFormatName() { return ocrImageFormatName; } /** * @param ocrImageFormatName name of image format used to render * page image * @see #getOcrImageFormatName() */ public void setOcrImageFormatName(String ocrImageFormatName) { if (!ocrImageFormatName.equals("png") && !ocrImageFormatName.equals("tiff") && !ocrImageFormatName.equals("jpeg")) { throw new IllegalArgumentException( "Available options: png, tiff, jpeg. 
" + "I'm sorry, but I don't recognize: " + ocrImageFormatName); } this.ocrImageFormatName = ocrImageFormatName; userConfigured.add("ocrImageFormatName"); } /** * Image type used to render the page image for OCR. * * @return image type * @see #setOcrImageType(TikaImageType) */ public TikaImageType getOcrImageType() { return ocrImageType; } /** * Image type used to render the page image for OCR. * * @param ocrImageType */ public void setOcrImageType(TikaImageType ocrImageType) { this.ocrImageType = ocrImageType; userConfigured.add("ocrImageType"); } /** * Image type used to render the page image for OCR. * * @see #setOcrImageType(TikaImageType) */ public void setOcrImageType(String ocrImageTypeString) { setOcrImageType(parseImageType(ocrImageTypeString)); } /** * Dots per inch used to render the page image for OCR * * @return dots per inch */ public int getOcrDPI() { return ocrDPI; } /** * Dots per inch used to render the page image for OCR. * This does not apply to all image formats. * * @param ocrDPI */ public void setOcrDPI(int ocrDPI) { this.ocrDPI = ocrDPI; userConfigured.add("ocrDPI"); } /** * Image quality used to render the page image for OCR. * This does not apply to all image formats * * @return */ public float getOcrImageQuality() { return ocrImageQuality; } /** * Image quality used to render the page image for OCR. * This does not apply to all image formats */ public void setOcrImageQuality(float ocrImageQuality) { this.ocrImageQuality = ocrImageQuality; userConfigured.add("ocrImageQuality"); } /** * @return whether or not to extract PDActions * @see #setExtractActions(boolean) */ public boolean isExtractActions() { return extractActions; } /** * Whether or not to extract PDActions from the file. * Most Action types are handled inline; javascript macros * are processed as embedded documents. 
* * @param v */ public void setExtractActions(boolean v) { extractActions = v; userConfigured.add("extractActions"); } /** * The maximum amount of memory to use when loading a pdf into a PDDocument. Additional * buffering is done using a temp file. The default is 512MB. * * @return */ public long getMaxMainMemoryBytes() { return maxMainMemoryBytes; } public void setMaxMainMemoryBytes(long maxMainMemoryBytes) { this.maxMainMemoryBytes = maxMainMemoryBytes; userConfigured.add("maxMainMemoryBytes"); } public boolean isSetKCMS() { return setKCMS; } /** * <p> * Whether to call <code>System.setProperty("sun.java2d.cmm", * "sun.java2d.cmm.kcms.KcmsServiceProvider")</code>. * KCMS is the unmaintained, legacy provider and is far faster than the newer replacement. * However, there are stability and security risks with using the unmaintained legacy provider. * </p> * <p> * Note, of course, that this is <b>not</b> thread safe. If the value is <code>false</code> * in your first thread, and the second thread changes this to <code>true</code>, * the system property in the first thread will now be <code>true</code>. * </p> * <p> * Default is <code>false</code>. * </p> * * @param setKCMS whether or not to set KCMS */ public void setSetKCMS(boolean setKCMS) { this.setKCMS = setKCMS; userConfigured.add("setKCMS"); } private TikaImageType parseImageType(String ocrImageType) { for (TikaImageType t : TikaImageType.values()) { if (ocrImageType.equalsIgnoreCase(t.toString())) { return t; } } StringBuilder sb = new StringBuilder(); sb.append("I regret that I could not parse '"); sb.append(ocrImageType); sb.append("'. 
I'm only familiar with: "); int i = 0; for (ImageType t : ImageType.values()) { if (i++ == 0) { sb.append(", "); } sb.append(t.toString()); } throw new IllegalArgumentException(sb.toString()); } public boolean isDetectAngles() { return detectAngles; } public void setDetectAngles(boolean detectAngles) { this.detectAngles = detectAngles; userConfigured.add("detectAngles"); } public PDFParserConfig cloneAndUpdate(PDFParserConfig updates) throws TikaException { PDFParserConfig updated = new PDFParserConfig(); for (Field field : this.getClass().getDeclaredFields()) { if (Modifier.isFinal(field.getModifiers())) { continue; } else if (Modifier.isStatic(field.getModifiers())) { continue; } if ("userConfigured".equals(field.getName())) { continue; } if (updates.userConfigured.contains(field.getName())) { try { field.set(updated, field.get(updates)); } catch (IllegalAccessException e) { throw new TikaException("can't update " + field.getName(), e); } } else { try { field.set(updated, field.get(this)); } catch (IllegalAccessException e) { throw new TikaException("can't update " + field.getName(), e); } } } return updated; } public void setRenderer(Renderer renderer) { this.renderer = renderer; userConfigured.add("renderer"); } public Renderer getRenderer() { return renderer; } public void setImageStrategy(String imageStrategy) { setImageStrategy(PDFParserConfig.IMAGE_STRATEGY.parse(imageStrategy)); } public void setImageStrategy(IMAGE_STRATEGY imageStrategy) { this.imageStrategy = imageStrategy; userConfigured.add("imageStrategy"); } /** * EXPERT: Customize the class that handles inline images within a PDF page. 
* * @param imageGraphicsEngineFactory */ public void setImageGraphicsEngineFactory(ImageGraphicsEngineFactory imageGraphicsEngineFactory) { this.imageGraphicsEngineFactory = imageGraphicsEngineFactory; userConfigured.add("imageGraphicsEngineFactory"); } public ImageGraphicsEngineFactory getImageGraphicsEngineFactory() { return imageGraphicsEngineFactory; } public IMAGE_STRATEGY getImageStrategy() { return imageStrategy; } public boolean isExtractIncrementalUpdateInfo() { return extractIncrementalUpdateInfo; } public void setExtractIncrementalUpdateInfo(boolean extractIncrementalUpdateInfo) { this.extractIncrementalUpdateInfo = extractIncrementalUpdateInfo; userConfigured.add("extractIncrementalUpdateInfo"); } public boolean isParseIncrementalUpdates() { return parseIncrementalUpdates; } public void setParseIncrementalUpdates(boolean parseIncrementalUpdates) { this.parseIncrementalUpdates = parseIncrementalUpdates; userConfigured.add("parseIncrementalUpdates"); } public int getMaxIncrementalUpdates() { return maxIncrementalUpdates; } /** * The maximum number of incremental updates to parse if * {@link #setParseIncrementalUpdates(boolean)} is set to <code>true</code> * * @param maxIncrementalUpdates */ public void setMaxIncrementalUpdates(int maxIncrementalUpdates) { this.maxIncrementalUpdates = maxIncrementalUpdates; userConfigured.add("maxIncrementalUpdates"); } public void setThrowOnEncryptedPayload(boolean throwOnEncryptedPayload) { this.throwOnEncryptedPayload = throwOnEncryptedPayload; userConfigured.add("throwOnEncryptedPayload"); } public boolean isThrowOnEncryptedPayload() { return throwOnEncryptedPayload; } public enum OCR_STRATEGY { AUTO, NO_OCR, OCR_ONLY, OCR_AND_TEXT_EXTRACTION; private static OCR_STRATEGY parse(String s) { if (s == null) { return NO_OCR; } else if ("no_ocr".equals(s.toLowerCase(Locale.ROOT))) { return NO_OCR; } else if ("ocr_only".equals(s.toLowerCase(Locale.ROOT))) { return OCR_ONLY; } else if 
(s.toLowerCase(Locale.ROOT).contains("ocr_and_text")) { return OCR_AND_TEXT_EXTRACTION; } else if ("auto".equals(s.toLowerCase(Locale.ROOT))) { return AUTO; } StringBuilder sb = new StringBuilder(); sb.append("I regret that I don't recognize '").append(s); sb.append("' as an OCR_STRATEGY. I only recognize:"); int i = 0; for (OCR_STRATEGY strategy : OCR_STRATEGY.values()) { if (i++ > 0) { sb.append(", "); } sb.append(strategy.toString()); } throw new IllegalArgumentException(sb.toString()); } } /** * Encapsulate the numbers used to control OCR Strategy when set to auto * <p> * If the total characters on the page < this.totalCharsPerPage * or * total unmapped unicode characters on the page > this.unmappedUnicodeCharsPerPage * then we will perform OCR on the page * <p> * If unamppedUnicodeCharsPerPage is an integer > 0, then we compare absolute number of characters. * If it is a float < 1, then we assume it is a percentage and we compare it to the * percentage of unmappedCharactersPerPage/totalCharsPerPage */ public static class OCRStrategyAuto implements Serializable { private final float unmappedUnicodeCharsPerPage; private final int totalCharsPerPage; public OCRStrategyAuto(float unmappedUnicodeCharsPerPage, int totalCharsPerPage) { this.totalCharsPerPage = totalCharsPerPage; this.unmappedUnicodeCharsPerPage = unmappedUnicodeCharsPerPage; } public float getUnmappedUnicodeCharsPerPage() { return unmappedUnicodeCharsPerPage; } public int getTotalCharsPerPage() { return totalCharsPerPage; } @Override public String toString() { //TODO -- figure out if this is actual BEST or whatever //and return that instead of the literal values String unmappedString = null; if (unmappedUnicodeCharsPerPage < 1.0) { unmappedString = String.format(Locale.US, "%.03f", unmappedUnicodeCharsPerPage * 100) + "%"; } else { unmappedString = String.format(Locale.US, "%.0f", unmappedUnicodeCharsPerPage); } return unmappedString + "," + totalCharsPerPage; } } public enum OCR_RENDERING_STRATEGY { 
NO_TEXT, //includes vector graphics and image TEXT_ONLY, //renders only glyphs VECTOR_GRAPHICS_ONLY, //renders only vector graphics ALL; //TODO: add AUTO? private static OCR_RENDERING_STRATEGY parse(String s) { if (s == null) { return ALL; } String lc = s.toLowerCase(Locale.US); switch (lc) { case "vector_graphics_only": return VECTOR_GRAPHICS_ONLY; case "text_only": return TEXT_ONLY; case "no_text": return NO_TEXT; case "all": return ALL; } StringBuilder sb = new StringBuilder(); sb.append("I regret that I don't recognize '").append(s); sb.append("' as an OCR_STRATEGY. I only recognize:"); int i = 0; for (OCR_RENDERING_STRATEGY strategy : OCR_RENDERING_STRATEGY.values()) { if (i++ > 0) { sb.append(", "); } sb.append(strategy.toString()); } throw new IllegalArgumentException(sb.toString()); } } public enum IMAGE_STRATEGY { NONE, /** * This is the more modern version of {@link PDFParserConfig#extractInlineImages} */ RAW_IMAGES, /** * If you want the rendered images, and you don't care that there's * markup in the xhtml handler per page then go with this option. * For some rendering engines, it is faster to render the full document * upfront than to parse a page, render a page, etc. */ RENDER_PAGES_BEFORE_PARSE, /** * This renders each page, one at a time, at the end of the page. * For some rendering engines, this may be slower, but it allows the writing * of image metadata into the xhtml in the proper location */ RENDER_PAGES_AT_PAGE_END; //TODO: add LOGICAL_IMAGES private static IMAGE_STRATEGY parse(String s) { String lc = s.toLowerCase(Locale.US); switch (lc) { case "rawimages" : return RAW_IMAGES; case "renderpagesbeforeparse": return RENDER_PAGES_BEFORE_PARSE; case "renderpagesatpageend": return RENDER_PAGES_AT_PAGE_END; case "none": return NONE; default: //fall through to exception break; } StringBuilder sb = new StringBuilder(); sb.append("I regret that I don't recognize '").append(s); sb.append("' as an IMAGE_STRATEGY. 
I only recognize:"); int i = 0; for (IMAGE_STRATEGY strategy : IMAGE_STRATEGY.values()) { if (i++ > 0) { sb.append(", "); } sb.append(strategy.toString()); } throw new IllegalArgumentException(sb.toString()); } } }
googleapis/google-api-java-client-services
38,053
clients/google-api-services-compute/alpha/1.30.1/com/google/api/services/compute/model/Image.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.compute.model;

/**
 * Represents an Image resource.
 *
 * You can use images to create boot disks for your VM instances. For more information, read Images.
 * (== resource_for {$api_version}.images ==)
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Image extends com.google.api.client.json.GenericJson {

  /**
   * Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
   * The value may be {@code null}.
   */
  // @JsonString: 64-bit values are carried as JSON strings on the wire.
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long archiveSizeBytes;

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String creationTimestamp;

  /**
   * The deprecation status associated with this image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DeprecationStatus deprecated;

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * Size of the image when restored onto a persistent disk (in GB).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.lang.Long diskSizeGb;

  /**
   * The name of the image family to which this image belongs. You can create disks by specifying an
   * image family instead of a specific image name. The image family always returns its latest image
   * that is not deprecated. The name of the image family must comply with RFC1035.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String family;

  /**
   * A list of features to enable on the guest operating system. Applicable only for bootable
   * images. Read Enabling guest operating system features to see a list of available options.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GuestOsFeature> guestOsFeatures;

  static {
    // hack to force ProGuard to consider GuestOsFeature used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GuestOsFeature.class);
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.math.BigInteger id;

  /**
   * Encrypts the image using a customer-supplied encryption key.
   *
   * After you encrypt an image with a customer-supplied key, you must provide the same key if you
   * use the image later (e.g. to create a disk from the image).
   *
   * Customer-supplied encryption keys do not protect access to metadata of the disk.
   *
   * If you do not provide an encryption key when creating the image, then the disk will be
   * encrypted using an automatically generated key and you do not need to provide a key to use the
   * image later.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey imageEncryptionKey;

  /**
   * [Output Only] Type of the resource. Always compute#image for images.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * A fingerprint for the labels being applied to this image, which is essentially a hash of the
   * labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
   * and changes after every request to modify or update labels. You must always provide an up-to-
   * date fingerprint hash in order to update or change labels, otherwise the request will fail with
   * error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String labelFingerprint;

  /**
   * Labels to apply to this image. These can be later modified by the setLabels method.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> labels;

  /**
   * Integer license codes indicating which licenses are attached to this image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.util.List<java.lang.Long> licenseCodes;

  /**
   * Any applicable license URI.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> licenses;

  /**
   * Name of the resource; provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * The parameters of the raw disk image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private RawDisk rawDisk;

  /**
   * [Output Only] Server-defined URL for the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * [Output Only] Server-defined URL for this resource's resource id.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLinkWithId;

  /**
   * Set the secure boot keys of shielded instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private InitialStateConfig shieldedInstanceInitialState;

  /**
   * URL of the source disk used to create this image. This can be a full or valid partial URL. You
   * must provide either this property or the rawDisk.source property but not both to create an
   * image. For example, the following are valid values: -
   * https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
   * projects/project/zones/zone/disks/disk - zones/zone/disks/disk
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceDisk;

  /**
   * The customer-supplied encryption key of the source disk. Required if the source disk is
   * protected by a customer-supplied encryption key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceDiskEncryptionKey;

  /**
   * [Output Only] The ID value of the disk used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given disk
   * name.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceDiskId;

  /**
   * URL of the source image used to create this image.
   *
   * In order to create an image, you must provide the full or partial URL of one of the following:
   * - The selfLink URL - This property - The rawDisk.source URL - The sourceDisk URL
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceImage;

  /**
   * The customer-supplied encryption key of the source image. Required if the source image is
   * protected by a customer-supplied encryption key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceImageEncryptionKey;

  /**
   * [Output Only] The ID value of the image used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given image
   * name.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceImageId;

  /**
   * URL of the source snapshot used to create this image.
   *
   * In order to create an image, you must provide the full or partial URL of one of the following:
   * - The selfLink URL - This property - The sourceImage URL - The rawDisk.source URL - The
   * sourceDisk URL
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceSnapshot;

  /**
   * The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
   * protected by a customer-supplied encryption key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceSnapshotEncryptionKey;

  /**
   * [Output Only] The ID value of the snapshot used to create this image. This value may be used to
   * determine whether the snapshot was taken from the current or a previous instance of a given
   * snapshot name.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceSnapshotId;

  /**
   * The type of the image used to create this disk. The default and only value is RAW
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceType;

  /**
   * [Output Only] The status of the image. An image can be used to create other resources, such as
   * instances, only after the image has been successfully created and the status is set to READY.
   * Possible values are FAILED, PENDING, or READY.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String status;

  /**
   * Cloud Storage bucket storage location of the image (regional or multi-regional).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> storageLocations;

  /**
   * Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
   * @return value or {@code null} for none
   */
  public java.lang.Long getArchiveSizeBytes() {
    return archiveSizeBytes;
  }

  /**
   * Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
   * @param archiveSizeBytes archiveSizeBytes or {@code null} for none
   */
  public Image setArchiveSizeBytes(java.lang.Long archiveSizeBytes) {
    this.archiveSizeBytes = archiveSizeBytes;
    return this;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @return value or {@code null} for none
   */
  public java.lang.String getCreationTimestamp() {
    return creationTimestamp;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @param creationTimestamp creationTimestamp or {@code null} for none
   */
  public Image setCreationTimestamp(java.lang.String creationTimestamp) {
    this.creationTimestamp = creationTimestamp;
    return this;
  }

  /**
   * The deprecation status associated with this image.
   * @return value or {@code null} for none
   */
  public DeprecationStatus getDeprecated() {
    return deprecated;
  }

  /**
   * The deprecation status associated with this image.
   * @param deprecated deprecated or {@code null} for none
   */
  public Image setDeprecated(DeprecationStatus deprecated) {
    this.deprecated = deprecated;
    return this;
  }

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * @param description description or {@code null} for none
   */
  public Image setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * Size of the image when restored onto a persistent disk (in GB).
   * @return value or {@code null} for none
   */
  public java.lang.Long getDiskSizeGb() {
    return diskSizeGb;
  }

  /**
   * Size of the image when restored onto a persistent disk (in GB).
   * @param diskSizeGb diskSizeGb or {@code null} for none
   */
  public Image setDiskSizeGb(java.lang.Long diskSizeGb) {
    this.diskSizeGb = diskSizeGb;
    return this;
  }

  /**
   * The name of the image family to which this image belongs. You can create disks by specifying an
   * image family instead of a specific image name. The image family always returns its latest image
   * that is not deprecated. The name of the image family must comply with RFC1035.
   * @return value or {@code null} for none
   */
  public java.lang.String getFamily() {
    return family;
  }

  /**
   * The name of the image family to which this image belongs. You can create disks by specifying an
   * image family instead of a specific image name. The image family always returns its latest image
   * that is not deprecated. The name of the image family must comply with RFC1035.
   * @param family family or {@code null} for none
   */
  public Image setFamily(java.lang.String family) {
    this.family = family;
    return this;
  }

  /**
   * A list of features to enable on the guest operating system. Applicable only for bootable
   * images. Read Enabling guest operating system features to see a list of available options.
   * @return value or {@code null} for none
   */
  public java.util.List<GuestOsFeature> getGuestOsFeatures() {
    return guestOsFeatures;
  }

  /**
   * A list of features to enable on the guest operating system. Applicable only for bootable
   * images. Read Enabling guest operating system features to see a list of available options.
   * @param guestOsFeatures guestOsFeatures or {@code null} for none
   */
  public Image setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
    this.guestOsFeatures = guestOsFeatures;
    return this;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * @return value or {@code null} for none
   */
  public java.math.BigInteger getId() {
    return id;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * @param id id or {@code null} for none
   */
  public Image setId(java.math.BigInteger id) {
    this.id = id;
    return this;
  }

  /**
   * Encrypts the image using a customer-supplied encryption key.
   *
   * After you encrypt an image with a customer-supplied key, you must provide the same key if you
   * use the image later (e.g. to create a disk from the image).
   *
   * Customer-supplied encryption keys do not protect access to metadata of the disk.
   *
   * If you do not provide an encryption key when creating the image, then the disk will be
   * encrypted using an automatically generated key and you do not need to provide a key to use the
   * image later.
   * @return value or {@code null} for none
   */
  public CustomerEncryptionKey getImageEncryptionKey() {
    return imageEncryptionKey;
  }

  /**
   * Encrypts the image using a customer-supplied encryption key.
   *
   * After you encrypt an image with a customer-supplied key, you must provide the same key if you
   * use the image later (e.g. to create a disk from the image).
   *
   * Customer-supplied encryption keys do not protect access to metadata of the disk.
   *
   * If you do not provide an encryption key when creating the image, then the disk will be
   * encrypted using an automatically generated key and you do not need to provide a key to use the
   * image later.
   * @param imageEncryptionKey imageEncryptionKey or {@code null} for none
   */
  public Image setImageEncryptionKey(CustomerEncryptionKey imageEncryptionKey) {
    this.imageEncryptionKey = imageEncryptionKey;
    return this;
  }

  /**
   * [Output Only] Type of the resource. Always compute#image for images.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * [Output Only] Type of the resource. Always compute#image for images.
   * @param kind kind or {@code null} for none
   */
  public Image setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * A fingerprint for the labels being applied to this image, which is essentially a hash of the
   * labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
   * and changes after every request to modify or update labels. You must always provide an up-to-
   * date fingerprint hash in order to update or change labels, otherwise the request will fail with
   * error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an image.
   * @see #decodeLabelFingerprint()
   * @return value or {@code null} for none
   */
  public java.lang.String getLabelFingerprint() {
    return labelFingerprint;
  }

  /**
   * A fingerprint for the labels being applied to this image, which is essentially a hash of the
   * labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
   * and changes after every request to modify or update labels. You must always provide an up-to-
   * date fingerprint hash in order to update or change labels, otherwise the request will fail with
   * error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an image.
   * @see #getLabelFingerprint()
   * @return Base64 decoded value or {@code null} for none
   *
   * @since 1.14
   */
  public byte[] decodeLabelFingerprint() {
    return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
  }

  /**
   * A fingerprint for the labels being applied to this image, which is essentially a hash of the
   * labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
   * and changes after every request to modify or update labels. You must always provide an up-to-
   * date fingerprint hash in order to update or change labels, otherwise the request will fail with
   * error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an image.
   * @see #encodeLabelFingerprint()
   * @param labelFingerprint labelFingerprint or {@code null} for none
   */
  public Image setLabelFingerprint(java.lang.String labelFingerprint) {
    this.labelFingerprint = labelFingerprint;
    return this;
  }

  /**
   * A fingerprint for the labels being applied to this image, which is essentially a hash of the
   * labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
   * and changes after every request to modify or update labels. You must always provide an up-to-
   * date fingerprint hash in order to update or change labels, otherwise the request will fail with
   * error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an image.
   * @see #setLabelFingerprint()
   *
   * <p>
   * The value is encoded Base64 or {@code null} for none.
   * </p>
   *
   * @since 1.14
   */
  public Image encodeLabelFingerprint(byte[] labelFingerprint) {
    this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
    return this;
  }

  /**
   * Labels to apply to this image. These can be later modified by the setLabels method.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getLabels() {
    return labels;
  }

  /**
   * Labels to apply to this image. These can be later modified by the setLabels method.
   * @param labels labels or {@code null} for none
   */
  public Image setLabels(java.util.Map<String, java.lang.String> labels) {
    this.labels = labels;
    return this;
  }

  /**
   * Integer license codes indicating which licenses are attached to this image.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.Long> getLicenseCodes() {
    return licenseCodes;
  }

  /**
   * Integer license codes indicating which licenses are attached to this image.
   * @param licenseCodes licenseCodes or {@code null} for none
   */
  public Image setLicenseCodes(java.util.List<java.lang.Long> licenseCodes) {
    this.licenseCodes = licenseCodes;
    return this;
  }

  /**
   * Any applicable license URI.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getLicenses() {
    return licenses;
  }

  /**
   * Any applicable license URI.
   * @param licenses licenses or {@code null} for none
   */
  public Image setLicenses(java.util.List<java.lang.String> licenses) {
    this.licenses = licenses;
    return this;
  }

  /**
   * Name of the resource; provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Name of the resource; provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * @param name name or {@code null} for none
   */
  public Image setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * The parameters of the raw disk image.
   * @return value or {@code null} for none
   */
  public RawDisk getRawDisk() {
    return rawDisk;
  }

  /**
   * The parameters of the raw disk image.
   * @param rawDisk rawDisk or {@code null} for none
   */
  public Image setRawDisk(RawDisk rawDisk) {
    this.rawDisk = rawDisk;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }

  /**
   * [Output Only] Server-defined URL for the resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public Image setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for this resource's resource id.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLinkWithId() {
    return selfLinkWithId;
  }

  /**
   * [Output Only] Server-defined URL for this resource's resource id.
   * @param selfLinkWithId selfLinkWithId or {@code null} for none
   */
  public Image setSelfLinkWithId(java.lang.String selfLinkWithId) {
    this.selfLinkWithId = selfLinkWithId;
    return this;
  }

  /**
   * Set the secure boot keys of shielded instance.
   * @return value or {@code null} for none
   */
  public InitialStateConfig getShieldedInstanceInitialState() {
    return shieldedInstanceInitialState;
  }

  /**
   * Set the secure boot keys of shielded instance.
   * @param shieldedInstanceInitialState shieldedInstanceInitialState or {@code null} for none
   */
  public Image setShieldedInstanceInitialState(InitialStateConfig shieldedInstanceInitialState) {
    this.shieldedInstanceInitialState = shieldedInstanceInitialState;
    return this;
  }

  /**
   * URL of the source disk used to create this image. This can be a full or valid partial URL. You
   * must provide either this property or the rawDisk.source property but not both to create an
   * image. For example, the following are valid values: -
   * https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
   * projects/project/zones/zone/disks/disk - zones/zone/disks/disk
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceDisk() {
    return sourceDisk;
  }

  /**
   * URL of the source disk used to create this image. This can be a full or valid partial URL. You
   * must provide either this property or the rawDisk.source property but not both to create an
   * image. For example, the following are valid values: -
   * https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
   * projects/project/zones/zone/disks/disk - zones/zone/disks/disk
   * @param sourceDisk sourceDisk or {@code null} for none
   */
  public Image setSourceDisk(java.lang.String sourceDisk) {
    this.sourceDisk = sourceDisk;
    return this;
  }

  /**
   * The customer-supplied encryption key of the source disk. Required if the source disk is
   * protected by a customer-supplied encryption key.
   * @return value or {@code null} for none
   */
  public CustomerEncryptionKey getSourceDiskEncryptionKey() {
    return sourceDiskEncryptionKey;
  }

  /**
   * The customer-supplied encryption key of the source disk. Required if the source disk is
   * protected by a customer-supplied encryption key.
   * @param sourceDiskEncryptionKey sourceDiskEncryptionKey or {@code null} for none
   */
  public Image setSourceDiskEncryptionKey(CustomerEncryptionKey sourceDiskEncryptionKey) {
    this.sourceDiskEncryptionKey = sourceDiskEncryptionKey;
    return this;
  }

  /**
   * [Output Only] The ID value of the disk used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given disk
   * name.
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceDiskId() {
    return sourceDiskId;
  }

  /**
   * [Output Only] The ID value of the disk used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given disk
   * name.
   * @param sourceDiskId sourceDiskId or {@code null} for none
   */
  public Image setSourceDiskId(java.lang.String sourceDiskId) {
    this.sourceDiskId = sourceDiskId;
    return this;
  }

  /**
   * URL of the source image used to create this image.
   *
   * In order to create an image, you must provide the full or partial URL of one of the following:
   * - The selfLink URL - This property - The rawDisk.source URL - The sourceDisk URL
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceImage() {
    return sourceImage;
  }

  /**
   * URL of the source image used to create this image.
   *
   * In order to create an image, you must provide the full or partial URL of one of the following:
   * - The selfLink URL - This property - The rawDisk.source URL - The sourceDisk URL
   * @param sourceImage sourceImage or {@code null} for none
   */
  public Image setSourceImage(java.lang.String sourceImage) {
    this.sourceImage = sourceImage;
    return this;
  }

  /**
   * The customer-supplied encryption key of the source image. Required if the source image is
   * protected by a customer-supplied encryption key.
   * @return value or {@code null} for none
   */
  public CustomerEncryptionKey getSourceImageEncryptionKey() {
    return sourceImageEncryptionKey;
  }

  /**
   * The customer-supplied encryption key of the source image. Required if the source image is
   * protected by a customer-supplied encryption key.
   * @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
   */
  public Image setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
    this.sourceImageEncryptionKey = sourceImageEncryptionKey;
    return this;
  }

  /**
   * [Output Only] The ID value of the image used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given image
   * name.
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceImageId() {
    return sourceImageId;
  }

  /**
   * [Output Only] The ID value of the image used to create this image. This value may be used to
   * determine whether the image was taken from the current or a previous instance of a given image
   * name.
   * @param sourceImageId sourceImageId or {@code null} for none
   */
  public Image setSourceImageId(java.lang.String sourceImageId) {
    this.sourceImageId = sourceImageId;
    return this;
  }

  /**
   * URL of the source snapshot used to create this image.
   *
   * In order to create an image, you must provide the full or partial URL of one of the following:
   * - The selfLink URL - This property - The sourceImage URL - The rawDisk.source URL - The
   * sourceDisk URL
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceSnapshot() {
    return sourceSnapshot;
  }

  /**
   * URL of the source snapshot used to create this image.
   *
   * In order to create an image, you must provide the full or partial URL of one of the following:
   * - The selfLink URL - This property - The sourceImage URL - The rawDisk.source URL - The
   * sourceDisk URL
   * @param sourceSnapshot sourceSnapshot or {@code null} for none
   */
  public Image setSourceSnapshot(java.lang.String sourceSnapshot) {
    this.sourceSnapshot = sourceSnapshot;
    return this;
  }

  /**
   * The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
   * protected by a customer-supplied encryption key.
   * @return value or {@code null} for none
   */
  public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
    return sourceSnapshotEncryptionKey;
  }

  /**
   * The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
   * protected by a customer-supplied encryption key.
   * @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
   */
  public Image setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
    this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
    return this;
  }

  /**
   * [Output Only] The ID value of the snapshot used to create this image. This value may be used to
   * determine whether the snapshot was taken from the current or a previous instance of a given
   * snapshot name.
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceSnapshotId() {
    return sourceSnapshotId;
  }

  /**
   * [Output Only] The ID value of the snapshot used to create this image. This value may be used to
   * determine whether the snapshot was taken from the current or a previous instance of a given
   * snapshot name.
   * @param sourceSnapshotId sourceSnapshotId or {@code null} for none
   */
  public Image setSourceSnapshotId(java.lang.String sourceSnapshotId) {
    this.sourceSnapshotId = sourceSnapshotId;
    return this;
  }

  /**
   * The type of the image used to create this disk. The default and only value is RAW
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceType() {
    return sourceType;
  }

  /**
   * The type of the image used to create this disk. The default and only value is RAW
   * @param sourceType sourceType or {@code null} for none
   */
  public Image setSourceType(java.lang.String sourceType) {
    this.sourceType = sourceType;
    return this;
  }

  /**
   * [Output Only] The status of the image. An image can be used to create other resources, such as
   * instances, only after the image has been successfully created and the status is set to READY.
   * Possible values are FAILED, PENDING, or READY.
   * @return value or {@code null} for none
   */
  public java.lang.String getStatus() {
    return status;
  }

  /**
   * [Output Only] The status of the image. An image can be used to create other resources, such as
   * instances, only after the image has been successfully created and the status is set to READY.
   * Possible values are FAILED, PENDING, or READY.
   * @param status status or {@code null} for none
   */
  public Image setStatus(java.lang.String status) {
    this.status = status;
    return this;
  }

  /**
   * Cloud Storage bucket storage location of the image (regional or multi-regional).
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getStorageLocations() {
    return storageLocations;
  }

  /**
   * Cloud Storage bucket storage location of the image (regional or multi-regional).
   * @param storageLocations storageLocations or {@code null} for none
   */
  public Image setStorageLocations(java.util.List<java.lang.String> storageLocations) {
    this.storageLocations = storageLocations;
    return this;
  }

  // Covariant overrides so generic set/clone calls keep the Image type for chaining.
  @Override
  public Image set(String fieldName, Object value) {
    return (Image) super.set(fieldName, value);
  }

  @Override
  public Image clone() {
    return (Image) super.clone();
  }

  /**
   * The parameters of the raw disk image.
   */
  public static final class RawDisk extends com.google.api.client.json.GenericJson {

    /**
     * The format used to encode and transmit the block device, which should be TAR. This is just a
     * container and transmission format and not a runtime format. Provided by the client when the
     * disk image is created.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String containerType;

    /**
     * [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
     * unpackaging provided by the client when the disk image is created.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String sha1Checksum;

    /**
     * The full Google Cloud Storage URL where the disk image is stored. You must provide either this
     * property or the sourceDisk property but not both.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String source;

    /**
     * The format used to encode and transmit the block device, which should be TAR. This is just a
     * container and transmission format and not a runtime format. Provided by the client when the
     * disk image is created.
     * @return value or {@code null} for none
     */
    public java.lang.String getContainerType() {
      return containerType;
    }

    /**
     * The format used to encode and transmit the block device, which should be TAR. This is just a
     * container and transmission format and not a runtime format. Provided by the client when the
     * disk image is created.
     * @param containerType containerType or {@code null} for none
     */
    public RawDisk setContainerType(java.lang.String containerType) {
      this.containerType = containerType;
      return this;
    }

    /**
     * [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
     * unpackaging provided by the client when the disk image is created.
     * @return value or {@code null} for none
     */
    public java.lang.String getSha1Checksum() {
      return sha1Checksum;
    }

    /**
     * [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
     * unpackaging provided by the client when the disk image is created.
     * @param sha1Checksum sha1Checksum or {@code null} for none
     */
    public RawDisk setSha1Checksum(java.lang.String sha1Checksum) {
      this.sha1Checksum = sha1Checksum;
      return this;
    }

    /**
     * The full Google Cloud Storage URL where the disk image is stored. You must provide either this
     * property or the sourceDisk property but not both.
     * @return value or {@code null} for none
     */
    public java.lang.String getSource() {
      return source;
    }

    /**
     * The full Google Cloud Storage URL where the disk image is stored. You must provide either this
     * property or the sourceDisk property but not both.
     * @param source source or {@code null} for none
     */
    public RawDisk setSource(java.lang.String source) {
      this.source = source;
      return this;
    }

    // Covariant overrides so generic set/clone calls keep the RawDisk type for chaining.
    @Override
    public RawDisk set(String fieldName, Object value) {
      return (RawDisk) super.set(fieldName, value);
    }

    @Override
    public RawDisk clone() {
      return (RawDisk) super.clone();
    }

  }
}
googleapis/google-cloud-java
37,890
java-video-stitcher/proto-google-cloud-video-stitcher-v1/src/main/java/com/google/cloud/video/stitcher/v1/CreateCdnKeyRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/video/stitcher/v1/video_stitcher_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.video.stitcher.v1; /** * * * <pre> * Request message for VideoStitcherService.createCdnKey. * </pre> * * Protobuf type {@code google.cloud.video.stitcher.v1.CreateCdnKeyRequest} */ public final class CreateCdnKeyRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.video.stitcher.v1.CreateCdnKeyRequest) CreateCdnKeyRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateCdnKeyRequest.newBuilder() to construct. 
private CreateCdnKeyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateCdnKeyRequest() { parent_ = ""; cdnKeyId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateCdnKeyRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_CreateCdnKeyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_CreateCdnKeyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.class, com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CDN_KEY_FIELD_NUMBER = 2; private com.google.cloud.video.stitcher.v1.CdnKey cdnKey_; /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the cdnKey field is set. */ @java.lang.Override public boolean hasCdnKey() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The cdnKey. */ @java.lang.Override public com.google.cloud.video.stitcher.v1.CdnKey getCdnKey() { return cdnKey_ == null ? com.google.cloud.video.stitcher.v1.CdnKey.getDefaultInstance() : cdnKey_; } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.video.stitcher.v1.CdnKeyOrBuilder getCdnKeyOrBuilder() { return cdnKey_ == null ? com.google.cloud.video.stitcher.v1.CdnKey.getDefaultInstance() : cdnKey_; } public static final int CDN_KEY_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object cdnKeyId_ = ""; /** * * * <pre> * Required. The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. 
* * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The cdnKeyId. */ @java.lang.Override public java.lang.String getCdnKeyId() { java.lang.Object ref = cdnKeyId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); cdnKeyId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. * * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for cdnKeyId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getCdnKeyIdBytes() { java.lang.Object ref = cdnKeyId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); cdnKeyId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getCdnKey()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cdnKeyId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, cdnKeyId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCdnKey()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cdnKeyId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, cdnKeyId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest)) { return super.equals(obj); } 
com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest other = (com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasCdnKey() != other.hasCdnKey()) return false; if (hasCdnKey()) { if (!getCdnKey().equals(other.getCdnKey())) return false; } if (!getCdnKeyId().equals(other.getCdnKeyId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasCdnKey()) { hash = (37 * hash) + CDN_KEY_FIELD_NUMBER; hash = (53 * hash) + getCdnKey().hashCode(); } hash = (37 * hash) + CDN_KEY_ID_FIELD_NUMBER; hash = (53 * hash) + getCdnKeyId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for VideoStitcherService.createCdnKey. * </pre> * * Protobuf type {@code google.cloud.video.stitcher.v1.CreateCdnKeyRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.video.stitcher.v1.CreateCdnKeyRequest) com.google.cloud.video.stitcher.v1.CreateCdnKeyRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_CreateCdnKeyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_CreateCdnKeyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.class, com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.Builder.class); } // Construct using com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCdnKeyFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; cdnKey_ = null; if (cdnKeyBuilder_ != null) { cdnKeyBuilder_.dispose(); cdnKeyBuilder_ = null; } cdnKeyId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto .internal_static_google_cloud_video_stitcher_v1_CreateCdnKeyRequest_descriptor; } @java.lang.Override public com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest getDefaultInstanceForType() { return com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest build() { com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest buildPartial() { com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest result = new com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.cdnKey_ = cdnKeyBuilder_ == null ? 
cdnKey_ : cdnKeyBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.cdnKeyId_ = cdnKeyId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest) { return mergeFrom((com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest other) { if (other == com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCdnKey()) { mergeCdnKey(other.getCdnKey()); } if (!other.getCdnKeyId().isEmpty()) { cdnKeyId_ = other.cdnKeyId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override 
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getCdnKeyFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { cdnKeyId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The project in which the CDN key should be created, in the form * of `projects/{project_number}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.video.stitcher.v1.CdnKey cdnKey_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.video.stitcher.v1.CdnKey, com.google.cloud.video.stitcher.v1.CdnKey.Builder, com.google.cloud.video.stitcher.v1.CdnKeyOrBuilder> cdnKeyBuilder_; /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the cdnKey field is set. */ public boolean hasCdnKey() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The cdnKey. */ public com.google.cloud.video.stitcher.v1.CdnKey getCdnKey() { if (cdnKeyBuilder_ == null) { return cdnKey_ == null ? com.google.cloud.video.stitcher.v1.CdnKey.getDefaultInstance() : cdnKey_; } else { return cdnKeyBuilder_.getMessage(); } } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCdnKey(com.google.cloud.video.stitcher.v1.CdnKey value) { if (cdnKeyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cdnKey_ = value; } else { cdnKeyBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The CDN key resource to create. 
* </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCdnKey(com.google.cloud.video.stitcher.v1.CdnKey.Builder builderForValue) { if (cdnKeyBuilder_ == null) { cdnKey_ = builderForValue.build(); } else { cdnKeyBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCdnKey(com.google.cloud.video.stitcher.v1.CdnKey value) { if (cdnKeyBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && cdnKey_ != null && cdnKey_ != com.google.cloud.video.stitcher.v1.CdnKey.getDefaultInstance()) { getCdnKeyBuilder().mergeFrom(value); } else { cdnKey_ = value; } } else { cdnKeyBuilder_.mergeFrom(value); } if (cdnKey_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCdnKey() { bitField0_ = (bitField0_ & ~0x00000002); cdnKey_ = null; if (cdnKeyBuilder_ != null) { cdnKeyBuilder_.dispose(); cdnKeyBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.video.stitcher.v1.CdnKey.Builder getCdnKeyBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCdnKeyFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The CDN key resource to create. 
* </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.video.stitcher.v1.CdnKeyOrBuilder getCdnKeyOrBuilder() { if (cdnKeyBuilder_ != null) { return cdnKeyBuilder_.getMessageOrBuilder(); } else { return cdnKey_ == null ? com.google.cloud.video.stitcher.v1.CdnKey.getDefaultInstance() : cdnKey_; } } /** * * * <pre> * Required. The CDN key resource to create. * </pre> * * <code> * .google.cloud.video.stitcher.v1.CdnKey cdn_key = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.video.stitcher.v1.CdnKey, com.google.cloud.video.stitcher.v1.CdnKey.Builder, com.google.cloud.video.stitcher.v1.CdnKeyOrBuilder> getCdnKeyFieldBuilder() { if (cdnKeyBuilder_ == null) { cdnKeyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.video.stitcher.v1.CdnKey, com.google.cloud.video.stitcher.v1.CdnKey.Builder, com.google.cloud.video.stitcher.v1.CdnKeyOrBuilder>( getCdnKey(), getParentForChildren(), isClean()); cdnKey_ = null; } return cdnKeyBuilder_; } private java.lang.Object cdnKeyId_ = ""; /** * * * <pre> * Required. The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. * * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The cdnKeyId. */ public java.lang.String getCdnKeyId() { java.lang.Object ref = cdnKeyId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); cdnKeyId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. 
The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. * * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for cdnKeyId. */ public com.google.protobuf.ByteString getCdnKeyIdBytes() { java.lang.Object ref = cdnKeyId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); cdnKeyId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. * * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The cdnKeyId to set. * @return This builder for chaining. */ public Builder setCdnKeyId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } cdnKeyId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. * * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. 
*/ public Builder clearCdnKeyId() { cdnKeyId_ = getDefaultInstance().getCdnKeyId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the CDN key, which will become the final * component of the CDN key's resource name. * * This value should conform to RFC-1034, which restricts to * lower-case letters, numbers, and hyphen, with the first character a * letter, the last a letter or a number, and a 63 character maximum. * </pre> * * <code>string cdn_key_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for cdnKeyId to set. * @return This builder for chaining. */ public Builder setCdnKeyIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); cdnKeyId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.video.stitcher.v1.CreateCdnKeyRequest) } // @@protoc_insertion_point(class_scope:google.cloud.video.stitcher.v1.CreateCdnKeyRequest) private static final com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest(); } public static com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateCdnKeyRequest> PARSER = new com.google.protobuf.AbstractParser<CreateCdnKeyRequest>() { @java.lang.Override public CreateCdnKeyRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateCdnKeyRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateCdnKeyRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.video.stitcher.v1.CreateCdnKeyRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): the three lines below are dataset/extraction metadata
// (repository id, file size, file path) that leaked into this source file
// between two concatenated generated files. They are not Java source;
// preserved verbatim as a comment so the surrounding text stays parseable:
// googleapis/google-cloud-java
// 37,902
// java-dataproc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/ListClustersResponse.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dataproc/v1/clusters.proto
// Protobuf Java Version: 3.25.8

package com.google.cloud.dataproc.v1;

/**
 * The list of all clusters in a project.
 *
 * <p>Response message for {@code ListClusters}; carries the page of clusters
 * (field 1) and an optional continuation token (field 2).
 *
 * <p>Protobuf type {@code google.cloud.dataproc.v1.ListClustersResponse}
 */
// NOTE(review): protoc-generated code (DO NOT EDIT) — comments are review
// annotations only; all tokens are unchanged from the generated output.
public final class ListClustersResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ListClustersResponse)
    ListClustersResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListClustersResponse.newBuilder() to construct.
  private ListClustersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ListClustersResponse() {
    clusters_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListClustersResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.ClustersProto
        .internal_static_google_cloud_dataproc_v1_ListClustersResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.ClustersProto
        .internal_static_google_cloud_dataproc_v1_ListClustersResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.ListClustersResponse.class,
            com.google.cloud.dataproc.v1.ListClustersResponse.Builder.class);
  }

  public static final int CLUSTERS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dataproc.v1.Cluster> clusters_;

  /**
   * Output only. The clusters in the project.
   *
   * <code>repeated .google.cloud.dataproc.v1.Cluster clusters = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dataproc.v1.Cluster> getClustersList() {
    return clusters_;
  }

  /** Output only. The clusters in the project (OrBuilder view). */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dataproc.v1.ClusterOrBuilder>
      getClustersOrBuilderList() {
    return clusters_;
  }

  /** Output only. Number of clusters in this page. */
  @java.lang.Override
  public int getClustersCount() {
    return clusters_.size();
  }

  /** Output only. The cluster at {@code index}. */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.Cluster getClusters(int index) {
    return clusters_.get(index);
  }

  /** Output only. OrBuilder view of the cluster at {@code index}. */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder(int index) {
    return clusters_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a java.lang.String or a ByteString; lazily converted on access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   * Output only. This token is included in the response if there are more
   * results to fetch. To fetch additional results, provide this value as the
   * `page_token` in a subsequent `ListClustersRequest`.
   *
   * <code>string next_page_token = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s; // cache the decoded UTF-8 string
      return s;
    }
  }

  /**
   * Bytes form of {@code next_page_token}.
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b; // cache the encoded bytes
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed yet; this message has no required fields, so it is
  // always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < clusters_.size(); i++) {
      output.writeMessage(1, clusters_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // memoized on first call

    size = 0;
    for (int i = 0; i < clusters_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, clusters_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.ListClustersResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.ListClustersResponse other =
        (com.google.cloud.dataproc.v1.ListClustersResponse) obj;

    if (!getClustersList().equals(other.getClustersList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getClustersCount() > 0) {
      hash = (37 * hash) + CLUSTERS_FIELD_NUMBER;
      hash = (53 * hash) + getClustersList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- parseFrom overloads: thin delegations to PARSER ----

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.dataproc.v1.ListClustersResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code google.cloud.dataproc.v1.ListClustersResponse}.
   *
   * <p>bitField0_ tracks which fields are set: bit 0 = {@code clusters} list
   * is mutable/owned by this builder; bit 1 = {@code next_page_token} is set.
   */
  public static final class Builder
      extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ListClustersResponse)
      com.google.cloud.dataproc.v1.ListClustersResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.ClustersProto
          .internal_static_google_cloud_dataproc_v1_ListClustersResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.ClustersProto
          .internal_static_google_cloud_dataproc_v1_ListClustersResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.ListClustersResponse.class,
              com.google.cloud.dataproc.v1.ListClustersResponse.Builder.class);
    }

    // Construct using com.google.cloud.dataproc.v1.ListClustersResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (clustersBuilder_ == null) {
        clusters_ = java.util.Collections.emptyList();
      } else {
        clusters_ = null;
        clustersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.ClustersProto
          .internal_static_google_cloud_dataproc_v1_ListClustersResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListClustersResponse getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.ListClustersResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListClustersResponse build() {
      com.google.cloud.dataproc.v1.ListClustersResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.ListClustersResponse buildPartial() {
      com.google.cloud.dataproc.v1.ListClustersResponse result =
          new com.google.cloud.dataproc.v1.ListClustersResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the clusters list into the result (unmodifiable snapshot when
    // this builder owned a mutable list).
    private void buildPartialRepeatedFields(
        com.google.cloud.dataproc.v1.ListClustersResponse result) {
      if (clustersBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          clusters_ = java.util.Collections.unmodifiableList(clusters_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.clusters_ = clusters_;
      } else {
        result.clusters_ = clustersBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.dataproc.v1.ListClustersResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index,
        java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataproc.v1.ListClustersResponse) {
        return mergeFrom((com.google.cloud.dataproc.v1.ListClustersResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dataproc.v1.ListClustersResponse other) {
      if (other == com.google.cloud.dataproc.v1.ListClustersResponse.getDefaultInstance())
        return this;
      if (clustersBuilder_ == null) {
        if (!other.clusters_.isEmpty()) {
          if (clusters_.isEmpty()) {
            // Adopt other's (immutable) list by reference; clear the
            // "mutable/owned" bit accordingly.
            clusters_ = other.clusters_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureClustersIsMutable();
            clusters_.addAll(other.clusters_);
          }
          onChanged();
        }
      } else {
        if (!other.clusters_.isEmpty()) {
          if (clustersBuilder_.isEmpty()) {
            clustersBuilder_.dispose();
            clustersBuilder_ = null;
            clusters_ = other.clusters_;
            bitField0_ = (bitField0_ & ~0x00000001);
            clustersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getClustersFieldBuilder()
                    : null;
          } else {
            clustersBuilder_.addAllMessages(other.clusters_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true; // no required fields
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (clusters), wire type 2
              {
                com.google.cloud.dataproc.v1.Cluster m =
                    input.readMessage(
                        com.google.cloud.dataproc.v1.Cluster.parser(), extensionRegistry);
                if (clustersBuilder_ == null) {
                  ensureClustersIsMutable();
                  clusters_.add(m);
                } else {
                  clustersBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18: // field 2 (next_page_token), wire type 2
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.dataproc.v1.Cluster> clusters_ =
        java.util.Collections.emptyList();

    // Copy-on-write: take a private mutable copy before the first in-place edit.
    private void ensureClustersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        clusters_ = new java.util.ArrayList<com.google.cloud.dataproc.v1.Cluster>(clusters_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataproc.v1.Cluster,
            com.google.cloud.dataproc.v1.Cluster.Builder,
            com.google.cloud.dataproc.v1.ClusterOrBuilder>
        clustersBuilder_;

    /** Output only. The clusters in the project. */
    public java.util.List<com.google.cloud.dataproc.v1.Cluster> getClustersList() {
      if (clustersBuilder_ == null) {
        return java.util.Collections.unmodifiableList(clusters_);
      } else {
        return clustersBuilder_.getMessageList();
      }
    }

    /** Output only. Number of clusters currently in the builder. */
    public int getClustersCount() {
      if (clustersBuilder_ == null) {
        return clusters_.size();
      } else {
        return clustersBuilder_.getCount();
      }
    }

    /** Output only. The cluster at {@code index}. */
    public com.google.cloud.dataproc.v1.Cluster getClusters(int index) {
      if (clustersBuilder_ == null) {
        return clusters_.get(index);
      } else {
        return clustersBuilder_.getMessage(index);
      }
    }

    /** Replaces the cluster at {@code index}. */
    public Builder setClusters(int index, com.google.cloud.dataproc.v1.Cluster value) {
      if (clustersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureClustersIsMutable();
        clusters_.set(index, value);
        onChanged();
      } else {
        clustersBuilder_.setMessage(index, value);
      }
      return this;
    }

    /** Replaces the cluster at {@code index} from a sub-builder. */
    public Builder setClusters(
        int index, com.google.cloud.dataproc.v1.Cluster.Builder builderForValue) {
      if (clustersBuilder_ == null) {
        ensureClustersIsMutable();
        clusters_.set(index, builderForValue.build());
        onChanged();
      } else {
        clustersBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /** Appends a cluster. */
    public Builder addClusters(com.google.cloud.dataproc.v1.Cluster value) {
      if (clustersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureClustersIsMutable();
        clusters_.add(value);
        onChanged();
      } else {
        clustersBuilder_.addMessage(value);
      }
      return this;
    }

    /** Inserts a cluster at {@code index}. */
    public Builder addClusters(int index, com.google.cloud.dataproc.v1.Cluster value) {
      if (clustersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureClustersIsMutable();
        clusters_.add(index, value);
        onChanged();
      } else {
        clustersBuilder_.addMessage(index, value);
      }
      return this;
    }

    /** Appends a cluster from a sub-builder. */
    public Builder addClusters(com.google.cloud.dataproc.v1.Cluster.Builder builderForValue) {
      if (clustersBuilder_ == null) {
        ensureClustersIsMutable();
        clusters_.add(builderForValue.build());
        onChanged();
      } else {
        clustersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /** Inserts a cluster at {@code index} from a sub-builder. */
    public Builder addClusters(
        int index, com.google.cloud.dataproc.v1.Cluster.Builder builderForValue) {
      if (clustersBuilder_ == null) {
        ensureClustersIsMutable();
        clusters_.add(index, builderForValue.build());
        onChanged();
      } else {
        clustersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /** Appends all of {@code values}. */
    public Builder addAllClusters(
        java.lang.Iterable<? extends com.google.cloud.dataproc.v1.Cluster> values) {
      if (clustersBuilder_ == null) {
        ensureClustersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, clusters_);
        onChanged();
      } else {
        clustersBuilder_.addAllMessages(values);
      }
      return this;
    }

    /** Clears the clusters list. */
    public Builder clearClusters() {
      if (clustersBuilder_ == null) {
        clusters_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        clustersBuilder_.clear();
      }
      return this;
    }

    /** Removes the cluster at {@code index}. */
    public Builder removeClusters(int index) {
      if (clustersBuilder_ == null) {
        ensureClustersIsMutable();
        clusters_.remove(index);
        onChanged();
      } else {
        clustersBuilder_.remove(index);
      }
      return this;
    }

    /** Returns a mutable sub-builder for the cluster at {@code index}. */
    public com.google.cloud.dataproc.v1.Cluster.Builder getClustersBuilder(int index) {
      return getClustersFieldBuilder().getBuilder(index);
    }

    /** Output only. OrBuilder view of the cluster at {@code index}. */
    public com.google.cloud.dataproc.v1.ClusterOrBuilder getClustersOrBuilder(int index) {
      if (clustersBuilder_ == null) {
        return clusters_.get(index);
      } else {
        return clustersBuilder_.getMessageOrBuilder(index);
      }
    }

    /** Output only. OrBuilder views for the whole list. */
    public java.util.List<? extends com.google.cloud.dataproc.v1.ClusterOrBuilder>
        getClustersOrBuilderList() {
      if (clustersBuilder_ != null) {
        return clustersBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(clusters_);
      }
    }

    /** Appends a default-valued cluster and returns its sub-builder. */
    public com.google.cloud.dataproc.v1.Cluster.Builder addClustersBuilder() {
      return getClustersFieldBuilder()
          .addBuilder(com.google.cloud.dataproc.v1.Cluster.getDefaultInstance());
    }

    /** Inserts a default-valued cluster at {@code index} and returns its sub-builder. */
    public com.google.cloud.dataproc.v1.Cluster.Builder addClustersBuilder(int index) {
      return getClustersFieldBuilder()
          .addBuilder(index, com.google.cloud.dataproc.v1.Cluster.getDefaultInstance());
    }

    /** Sub-builders for the whole list. */
    public java.util.List<com.google.cloud.dataproc.v1.Cluster.Builder>
        getClustersBuilderList() {
      return getClustersFieldBuilder().getBuilderList();
    }

    // Lazily creates the repeated-field builder; once created, it owns the list.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataproc.v1.Cluster,
            com.google.cloud.dataproc.v1.Cluster.Builder,
            com.google.cloud.dataproc.v1.ClusterOrBuilder>
        getClustersFieldBuilder() {
      if (clustersBuilder_ == null) {
        clustersBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dataproc.v1.Cluster,
                com.google.cloud.dataproc.v1.Cluster.Builder,
                com.google.cloud.dataproc.v1.ClusterOrBuilder>(
                clusters_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        clusters_ = null; // ownership transfers to the builder
      }
      return clustersBuilder_;
    }

    // Holds either a java.lang.String or a ByteString; lazily converted on access.
    private java.lang.Object nextPageToken_ = "";

    /**
     * Output only. Continuation token; pass as {@code page_token} in a
     * subsequent {@code ListClustersRequest} to fetch more results.
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Bytes form of {@code next_page_token}.
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets {@code next_page_token}.
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Clears {@code next_page_token} back to the default (empty string).
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets {@code next_page_token} from raw bytes; validates the bytes are UTF-8.
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ListClustersResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ListClustersResponse)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.dataproc.v1.ListClustersResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ListClustersResponse();
  }

  public static com.google.cloud.dataproc.v1.ListClustersResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser; partial-parse failures attach the partially-built message
  // to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListClustersResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListClustersResponse>() {
        @java.lang.Override
        public ListClustersResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException()
                .setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListClustersResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListClustersResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataproc.v1.ListClustersResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-cloud-java
37,939
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/TestIamPermissionsImageRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * A request message for Images.TestIamPermissions. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.TestIamPermissionsImageRequest} */ public final class TestIamPermissionsImageRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.TestIamPermissionsImageRequest) TestIamPermissionsImageRequestOrBuilder { private static final long serialVersionUID = 0L; // Use TestIamPermissionsImageRequest.newBuilder() to construct. 
private TestIamPermissionsImageRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TestIamPermissionsImageRequest() { project_ = ""; resource_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TestIamPermissionsImageRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_TestIamPermissionsImageRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_TestIamPermissionsImageRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.TestIamPermissionsImageRequest.class, com.google.cloud.compute.v1.TestIamPermissionsImageRequest.Builder.class); } private int bitField0_; public static final int PROJECT_FIELD_NUMBER = 227560217; @SuppressWarnings("serial") private volatile java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. 
*/ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_FIELD_NUMBER = 195806222; @SuppressWarnings("serial") private volatile java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. */ @java.lang.Override public java.lang.String getResource() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. */ @java.lang.Override public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TEST_PERMISSIONS_REQUEST_RESOURCE_FIELD_NUMBER = 439214758; private com.google.cloud.compute.v1.TestPermissionsRequest testPermissionsRequestResource_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the testPermissionsRequestResource field is set. 
*/ @java.lang.Override public boolean hasTestPermissionsRequestResource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The testPermissionsRequestResource. */ @java.lang.Override public com.google.cloud.compute.v1.TestPermissionsRequest getTestPermissionsRequestResource() { return testPermissionsRequestResource_ == null ? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance() : testPermissionsRequestResource_; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder getTestPermissionsRequestResourceOrBuilder() { return testPermissionsRequestResource_ == null ? 
com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance() : testPermissionsRequestResource_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(439214758, getTestPermissionsRequestResource()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 439214758, getTestPermissionsRequestResource()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.TestIamPermissionsImageRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.TestIamPermissionsImageRequest other = (com.google.cloud.compute.v1.TestIamPermissionsImageRequest) obj; if 
(!getProject().equals(other.getProject())) return false; if (!getResource().equals(other.getResource())) return false; if (hasTestPermissionsRequestResource() != other.hasTestPermissionsRequestResource()) return false; if (hasTestPermissionsRequestResource()) { if (!getTestPermissionsRequestResource().equals(other.getTestPermissionsRequestResource())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); if (hasTestPermissionsRequestResource()) { hash = (37 * hash) + TEST_PERMISSIONS_REQUEST_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getTestPermissionsRequestResource().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.TestIamPermissionsImageRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for Images.TestIamPermissions. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.TestIamPermissionsImageRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.TestIamPermissionsImageRequest) com.google.cloud.compute.v1.TestIamPermissionsImageRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_TestIamPermissionsImageRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_TestIamPermissionsImageRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.TestIamPermissionsImageRequest.class, com.google.cloud.compute.v1.TestIamPermissionsImageRequest.Builder.class); } // 
Construct using com.google.cloud.compute.v1.TestIamPermissionsImageRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getTestPermissionsRequestResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; project_ = ""; resource_ = ""; testPermissionsRequestResource_ = null; if (testPermissionsRequestResourceBuilder_ != null) { testPermissionsRequestResourceBuilder_.dispose(); testPermissionsRequestResourceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_TestIamPermissionsImageRequest_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.TestIamPermissionsImageRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.TestIamPermissionsImageRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.TestIamPermissionsImageRequest build() { com.google.cloud.compute.v1.TestIamPermissionsImageRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.TestIamPermissionsImageRequest buildPartial() { com.google.cloud.compute.v1.TestIamPermissionsImageRequest result = new com.google.cloud.compute.v1.TestIamPermissionsImageRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.compute.v1.TestIamPermissionsImageRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.project_ = project_; } if 
(((from_bitField0_ & 0x00000002) != 0)) { result.resource_ = resource_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.testPermissionsRequestResource_ = testPermissionsRequestResourceBuilder_ == null ? testPermissionsRequestResource_ : testPermissionsRequestResourceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.TestIamPermissionsImageRequest) { return mergeFrom((com.google.cloud.compute.v1.TestIamPermissionsImageRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.TestIamPermissionsImageRequest other) { if (other == com.google.cloud.compute.v1.TestIamPermissionsImageRequest.getDefaultInstance()) return this; if (!other.getProject().isEmpty()) { project_ = other.project_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getResource().isEmpty()) { resource_ = other.resource_; bitField0_ |= 0x00000002; onChanged(); } if 
(other.hasTestPermissionsRequestResource()) { mergeTestPermissionsRequestResource(other.getTestPermissionsRequestResource()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 1566449778: { resource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 1566449778 case 1820481738: { project_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 1820481738 case -781249230: { input.readMessage( getTestPermissionsRequestResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case -781249230 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. 
* </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for project to set. * @return This builder for chaining. */ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. 
*/ public java.lang.String getResource() { java.lang.Object ref = resource_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. */ public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The resource to set. * @return This builder for chaining. */ public Builder setResource(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resource_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearResource() { resource_ = getDefaultInstance().getResource(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for resource to set. * @return This builder for chaining. 
*/ public Builder setResourceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resource_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.compute.v1.TestPermissionsRequest testPermissionsRequestResource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.TestPermissionsRequest, com.google.cloud.compute.v1.TestPermissionsRequest.Builder, com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder> testPermissionsRequestResourceBuilder_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the testPermissionsRequestResource field is set. */ public boolean hasTestPermissionsRequestResource() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The testPermissionsRequestResource. */ public com.google.cloud.compute.v1.TestPermissionsRequest getTestPermissionsRequestResource() { if (testPermissionsRequestResourceBuilder_ == null) { return testPermissionsRequestResource_ == null ? 
com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance() : testPermissionsRequestResource_; } else { return testPermissionsRequestResourceBuilder_.getMessage(); } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTestPermissionsRequestResource( com.google.cloud.compute.v1.TestPermissionsRequest value) { if (testPermissionsRequestResourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } testPermissionsRequestResource_ = value; } else { testPermissionsRequestResourceBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTestPermissionsRequestResource( com.google.cloud.compute.v1.TestPermissionsRequest.Builder builderForValue) { if (testPermissionsRequestResourceBuilder_ == null) { testPermissionsRequestResource_ = builderForValue.build(); } else { testPermissionsRequestResourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeTestPermissionsRequestResource( com.google.cloud.compute.v1.TestPermissionsRequest value) { if (testPermissionsRequestResourceBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && testPermissionsRequestResource_ != null && testPermissionsRequestResource_ != com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()) { 
getTestPermissionsRequestResourceBuilder().mergeFrom(value); } else { testPermissionsRequestResource_ = value; } } else { testPermissionsRequestResourceBuilder_.mergeFrom(value); } if (testPermissionsRequestResource_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearTestPermissionsRequestResource() { bitField0_ = (bitField0_ & ~0x00000004); testPermissionsRequestResource_ = null; if (testPermissionsRequestResourceBuilder_ != null) { testPermissionsRequestResourceBuilder_.dispose(); testPermissionsRequestResourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.TestPermissionsRequest.Builder getTestPermissionsRequestResourceBuilder() { bitField0_ |= 0x00000004; onChanged(); return getTestPermissionsRequestResourceFieldBuilder().getBuilder(); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder getTestPermissionsRequestResourceOrBuilder() { if (testPermissionsRequestResourceBuilder_ != null) { return testPermissionsRequestResourceBuilder_.getMessageOrBuilder(); } else { return testPermissionsRequestResource_ == null ? 
com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance() : testPermissionsRequestResource_; } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.TestPermissionsRequest, com.google.cloud.compute.v1.TestPermissionsRequest.Builder, com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder> getTestPermissionsRequestResourceFieldBuilder() { if (testPermissionsRequestResourceBuilder_ == null) { testPermissionsRequestResourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.TestPermissionsRequest, com.google.cloud.compute.v1.TestPermissionsRequest.Builder, com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>( getTestPermissionsRequestResource(), getParentForChildren(), isClean()); testPermissionsRequestResource_ = null; } return testPermissionsRequestResourceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.TestIamPermissionsImageRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.TestIamPermissionsImageRequest) private static final com.google.cloud.compute.v1.TestIamPermissionsImageRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.TestIamPermissionsImageRequest(); } public static com.google.cloud.compute.v1.TestIamPermissionsImageRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<TestIamPermissionsImageRequest> PARSER = new com.google.protobuf.AbstractParser<TestIamPermissionsImageRequest>() { @java.lang.Override public TestIamPermissionsImageRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TestIamPermissionsImageRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TestIamPermissionsImageRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.TestIamPermissionsImageRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
38,107
java-retail/grpc-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/ControlServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.retail.v2beta; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for modifying Control. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/retail/v2beta/control_service.proto") @io.grpc.stub.annotations.GrpcGenerated public final class ControlServiceGrpc { private ControlServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.retail.v2beta.ControlService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.CreateControlRequest, com.google.cloud.retail.v2beta.Control> getCreateControlMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateControl", requestType = com.google.cloud.retail.v2beta.CreateControlRequest.class, responseType = com.google.cloud.retail.v2beta.Control.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.CreateControlRequest, com.google.cloud.retail.v2beta.Control> getCreateControlMethod() { io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.CreateControlRequest, com.google.cloud.retail.v2beta.Control> getCreateControlMethod; if ((getCreateControlMethod = ControlServiceGrpc.getCreateControlMethod) == null) { synchronized (ControlServiceGrpc.class) { if ((getCreateControlMethod = ControlServiceGrpc.getCreateControlMethod) == null) { ControlServiceGrpc.getCreateControlMethod = getCreateControlMethod = io.grpc.MethodDescriptor .<com.google.cloud.retail.v2beta.CreateControlRequest, com.google.cloud.retail.v2beta.Control> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateControl")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.CreateControlRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.Control.getDefaultInstance())) .setSchemaDescriptor( new ControlServiceMethodDescriptorSupplier("CreateControl")) .build(); } } } return getCreateControlMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.DeleteControlRequest, com.google.protobuf.Empty> getDeleteControlMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteControl", requestType = 
com.google.cloud.retail.v2beta.DeleteControlRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.DeleteControlRequest, com.google.protobuf.Empty> getDeleteControlMethod() { io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.DeleteControlRequest, com.google.protobuf.Empty> getDeleteControlMethod; if ((getDeleteControlMethod = ControlServiceGrpc.getDeleteControlMethod) == null) { synchronized (ControlServiceGrpc.class) { if ((getDeleteControlMethod = ControlServiceGrpc.getDeleteControlMethod) == null) { ControlServiceGrpc.getDeleteControlMethod = getDeleteControlMethod = io.grpc.MethodDescriptor .<com.google.cloud.retail.v2beta.DeleteControlRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteControl")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.DeleteControlRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor( new ControlServiceMethodDescriptorSupplier("DeleteControl")) .build(); } } } return getDeleteControlMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.UpdateControlRequest, com.google.cloud.retail.v2beta.Control> getUpdateControlMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateControl", requestType = com.google.cloud.retail.v2beta.UpdateControlRequest.class, responseType = com.google.cloud.retail.v2beta.Control.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.UpdateControlRequest, com.google.cloud.retail.v2beta.Control> getUpdateControlMethod() { 
io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.UpdateControlRequest, com.google.cloud.retail.v2beta.Control> getUpdateControlMethod; if ((getUpdateControlMethod = ControlServiceGrpc.getUpdateControlMethod) == null) { synchronized (ControlServiceGrpc.class) { if ((getUpdateControlMethod = ControlServiceGrpc.getUpdateControlMethod) == null) { ControlServiceGrpc.getUpdateControlMethod = getUpdateControlMethod = io.grpc.MethodDescriptor .<com.google.cloud.retail.v2beta.UpdateControlRequest, com.google.cloud.retail.v2beta.Control> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateControl")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.UpdateControlRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.Control.getDefaultInstance())) .setSchemaDescriptor( new ControlServiceMethodDescriptorSupplier("UpdateControl")) .build(); } } } return getUpdateControlMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.GetControlRequest, com.google.cloud.retail.v2beta.Control> getGetControlMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetControl", requestType = com.google.cloud.retail.v2beta.GetControlRequest.class, responseType = com.google.cloud.retail.v2beta.Control.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.GetControlRequest, com.google.cloud.retail.v2beta.Control> getGetControlMethod() { io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.GetControlRequest, com.google.cloud.retail.v2beta.Control> getGetControlMethod; if ((getGetControlMethod = ControlServiceGrpc.getGetControlMethod) == null) { synchronized (ControlServiceGrpc.class) { if ((getGetControlMethod = 
ControlServiceGrpc.getGetControlMethod) == null) { ControlServiceGrpc.getGetControlMethod = getGetControlMethod = io.grpc.MethodDescriptor .<com.google.cloud.retail.v2beta.GetControlRequest, com.google.cloud.retail.v2beta.Control> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetControl")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.GetControlRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.Control.getDefaultInstance())) .setSchemaDescriptor(new ControlServiceMethodDescriptorSupplier("GetControl")) .build(); } } } return getGetControlMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.ListControlsRequest, com.google.cloud.retail.v2beta.ListControlsResponse> getListControlsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListControls", requestType = com.google.cloud.retail.v2beta.ListControlsRequest.class, responseType = com.google.cloud.retail.v2beta.ListControlsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.ListControlsRequest, com.google.cloud.retail.v2beta.ListControlsResponse> getListControlsMethod() { io.grpc.MethodDescriptor< com.google.cloud.retail.v2beta.ListControlsRequest, com.google.cloud.retail.v2beta.ListControlsResponse> getListControlsMethod; if ((getListControlsMethod = ControlServiceGrpc.getListControlsMethod) == null) { synchronized (ControlServiceGrpc.class) { if ((getListControlsMethod = ControlServiceGrpc.getListControlsMethod) == null) { ControlServiceGrpc.getListControlsMethod = getListControlsMethod = io.grpc.MethodDescriptor .<com.google.cloud.retail.v2beta.ListControlsRequest, com.google.cloud.retail.v2beta.ListControlsResponse> newBuilder() 
.setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListControls")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.ListControlsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.retail.v2beta.ListControlsResponse .getDefaultInstance())) .setSchemaDescriptor( new ControlServiceMethodDescriptorSupplier("ListControls")) .build(); } } } return getListControlsMethod; } /** Creates a new async stub that supports all call types for the service */ public static ControlServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ControlServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<ControlServiceStub>() { @java.lang.Override public ControlServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceStub(channel, callOptions); } }; return ControlServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static ControlServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingV2Stub>() { @java.lang.Override public ControlServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceBlockingV2Stub(channel, callOptions); } }; return ControlServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static ControlServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingStub>() { 
@java.lang.Override public ControlServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceBlockingStub(channel, callOptions); } }; return ControlServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static ControlServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ControlServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<ControlServiceFutureStub>() { @java.lang.Override public ControlServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceFutureStub(channel, callOptions); } }; return ControlServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * Service for modifying Control. * </pre> */ public interface AsyncService { /** * * * <pre> * Creates a Control. * If the [Control][google.cloud.retail.v2beta.Control] to create already * exists, an ALREADY_EXISTS error is returned. * </pre> */ default void createControl( com.google.cloud.retail.v2beta.CreateControlRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreateControlMethod(), responseObserver); } /** * * * <pre> * Deletes a Control. * If the [Control][google.cloud.retail.v2beta.Control] to delete does not * exist, a NOT_FOUND error is returned. * </pre> */ default void deleteControl( com.google.cloud.retail.v2beta.DeleteControlRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteControlMethod(), responseObserver); } /** * * * <pre> * Updates a Control. * [Control][google.cloud.retail.v2beta.Control] cannot be set to a different * oneof field, if so an INVALID_ARGUMENT is returned. 
If the * [Control][google.cloud.retail.v2beta.Control] to update does not exist, a * NOT_FOUND error is returned. * </pre> */ default void updateControl( com.google.cloud.retail.v2beta.UpdateControlRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdateControlMethod(), responseObserver); } /** * * * <pre> * Gets a Control. * </pre> */ default void getControl( com.google.cloud.retail.v2beta.GetControlRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetControlMethod(), responseObserver); } /** * * * <pre> * Lists all Controls by their parent * [Catalog][google.cloud.retail.v2beta.Catalog]. * </pre> */ default void listControls( com.google.cloud.retail.v2beta.ListControlsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.ListControlsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListControlsMethod(), responseObserver); } } /** * Base class for the server implementation of the service ControlService. * * <pre> * Service for modifying Control. * </pre> */ public abstract static class ControlServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return ControlServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service ControlService. * * <pre> * Service for modifying Control. 
* </pre> */ public static final class ControlServiceStub extends io.grpc.stub.AbstractAsyncStub<ControlServiceStub> { private ControlServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceStub(channel, callOptions); } /** * * * <pre> * Creates a Control. * If the [Control][google.cloud.retail.v2beta.Control] to create already * exists, an ALREADY_EXISTS error is returned. * </pre> */ public void createControl( com.google.cloud.retail.v2beta.CreateControlRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateControlMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes a Control. * If the [Control][google.cloud.retail.v2beta.Control] to delete does not * exist, a NOT_FOUND error is returned. * </pre> */ public void deleteControl( com.google.cloud.retail.v2beta.DeleteControlRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteControlMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates a Control. * [Control][google.cloud.retail.v2beta.Control] cannot be set to a different * oneof field, if so an INVALID_ARGUMENT is returned. If the * [Control][google.cloud.retail.v2beta.Control] to update does not exist, a * NOT_FOUND error is returned. * </pre> */ public void updateControl( com.google.cloud.retail.v2beta.UpdateControlRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateControlMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Gets a Control. 
* </pre> */ public void getControl( com.google.cloud.retail.v2beta.GetControlRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetControlMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Lists all Controls by their parent * [Catalog][google.cloud.retail.v2beta.Catalog]. * </pre> */ public void listControls( com.google.cloud.retail.v2beta.ListControlsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.ListControlsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListControlsMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service ControlService. * * <pre> * Service for modifying Control. * </pre> */ public static final class ControlServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<ControlServiceBlockingV2Stub> { private ControlServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Creates a Control. * If the [Control][google.cloud.retail.v2beta.Control] to create already * exists, an ALREADY_EXISTS error is returned. * </pre> */ public com.google.cloud.retail.v2beta.Control createControl( com.google.cloud.retail.v2beta.CreateControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateControlMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes a Control. * If the [Control][google.cloud.retail.v2beta.Control] to delete does not * exist, a NOT_FOUND error is returned. 
* </pre> */ public com.google.protobuf.Empty deleteControl( com.google.cloud.retail.v2beta.DeleteControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteControlMethod(), getCallOptions(), request); } /** * * * <pre> * Updates a Control. * [Control][google.cloud.retail.v2beta.Control] cannot be set to a different * oneof field, if so an INVALID_ARGUMENT is returned. If the * [Control][google.cloud.retail.v2beta.Control] to update does not exist, a * NOT_FOUND error is returned. * </pre> */ public com.google.cloud.retail.v2beta.Control updateControl( com.google.cloud.retail.v2beta.UpdateControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateControlMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a Control. * </pre> */ public com.google.cloud.retail.v2beta.Control getControl( com.google.cloud.retail.v2beta.GetControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetControlMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all Controls by their parent * [Catalog][google.cloud.retail.v2beta.Catalog]. * </pre> */ public com.google.cloud.retail.v2beta.ListControlsResponse listControls( com.google.cloud.retail.v2beta.ListControlsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListControlsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service ControlService. * * <pre> * Service for modifying Control. 
* </pre> */ public static final class ControlServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<ControlServiceBlockingStub> { private ControlServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Creates a Control. * If the [Control][google.cloud.retail.v2beta.Control] to create already * exists, an ALREADY_EXISTS error is returned. * </pre> */ public com.google.cloud.retail.v2beta.Control createControl( com.google.cloud.retail.v2beta.CreateControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateControlMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes a Control. * If the [Control][google.cloud.retail.v2beta.Control] to delete does not * exist, a NOT_FOUND error is returned. * </pre> */ public com.google.protobuf.Empty deleteControl( com.google.cloud.retail.v2beta.DeleteControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteControlMethod(), getCallOptions(), request); } /** * * * <pre> * Updates a Control. * [Control][google.cloud.retail.v2beta.Control] cannot be set to a different * oneof field, if so an INVALID_ARGUMENT is returned. If the * [Control][google.cloud.retail.v2beta.Control] to update does not exist, a * NOT_FOUND error is returned. * </pre> */ public com.google.cloud.retail.v2beta.Control updateControl( com.google.cloud.retail.v2beta.UpdateControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateControlMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a Control. 
* </pre> */ public com.google.cloud.retail.v2beta.Control getControl( com.google.cloud.retail.v2beta.GetControlRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetControlMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all Controls by their parent * [Catalog][google.cloud.retail.v2beta.Catalog]. * </pre> */ public com.google.cloud.retail.v2beta.ListControlsResponse listControls( com.google.cloud.retail.v2beta.ListControlsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListControlsMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service ControlService. * * <pre> * Service for modifying Control. * </pre> */ public static final class ControlServiceFutureStub extends io.grpc.stub.AbstractFutureStub<ControlServiceFutureStub> { private ControlServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ControlServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ControlServiceFutureStub(channel, callOptions); } /** * * * <pre> * Creates a Control. * If the [Control][google.cloud.retail.v2beta.Control] to create already * exists, an ALREADY_EXISTS error is returned. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.retail.v2beta.Control> createControl(com.google.cloud.retail.v2beta.CreateControlRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateControlMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes a Control. * If the [Control][google.cloud.retail.v2beta.Control] to delete does not * exist, a NOT_FOUND error is returned. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteControl(com.google.cloud.retail.v2beta.DeleteControlRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteControlMethod(), getCallOptions()), request); } /** * * * <pre> * Updates a Control. * [Control][google.cloud.retail.v2beta.Control] cannot be set to a different * oneof field, if so an INVALID_ARGUMENT is returned. If the * [Control][google.cloud.retail.v2beta.Control] to update does not exist, a * NOT_FOUND error is returned. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.retail.v2beta.Control> updateControl(com.google.cloud.retail.v2beta.UpdateControlRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateControlMethod(), getCallOptions()), request); } /** * * * <pre> * Gets a Control. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.retail.v2beta.Control> getControl(com.google.cloud.retail.v2beta.GetControlRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetControlMethod(), getCallOptions()), request); } /** * * * <pre> * Lists all Controls by their parent * [Catalog][google.cloud.retail.v2beta.Catalog]. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.retail.v2beta.ListControlsResponse> listControls(com.google.cloud.retail.v2beta.ListControlsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListControlsMethod(), getCallOptions()), request); } } private static final int METHODID_CREATE_CONTROL = 0; private static final int METHODID_DELETE_CONTROL = 1; private static final int METHODID_UPDATE_CONTROL = 2; private static final int METHODID_GET_CONTROL = 3; private static final int METHODID_LIST_CONTROLS = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_CREATE_CONTROL: serviceImpl.createControl( (com.google.cloud.retail.v2beta.CreateControlRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control>) responseObserver); break; case METHODID_DELETE_CONTROL: serviceImpl.deleteControl( (com.google.cloud.retail.v2beta.DeleteControlRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; case METHODID_UPDATE_CONTROL: serviceImpl.updateControl( (com.google.cloud.retail.v2beta.UpdateControlRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control>) responseObserver); break; case METHODID_GET_CONTROL: serviceImpl.getControl( (com.google.cloud.retail.v2beta.GetControlRequest) request, 
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.Control>) responseObserver); break; case METHODID_LIST_CONTROLS: serviceImpl.listControls( (com.google.cloud.retail.v2beta.ListControlsRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.retail.v2beta.ListControlsResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getCreateControlMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.retail.v2beta.CreateControlRequest, com.google.cloud.retail.v2beta.Control>(service, METHODID_CREATE_CONTROL))) .addMethod( getDeleteControlMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.retail.v2beta.DeleteControlRequest, com.google.protobuf.Empty>( service, METHODID_DELETE_CONTROL))) .addMethod( getUpdateControlMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.retail.v2beta.UpdateControlRequest, com.google.cloud.retail.v2beta.Control>(service, METHODID_UPDATE_CONTROL))) .addMethod( getGetControlMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.retail.v2beta.GetControlRequest, com.google.cloud.retail.v2beta.Control>(service, METHODID_GET_CONTROL))) .addMethod( getListControlsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.retail.v2beta.ListControlsRequest, com.google.cloud.retail.v2beta.ListControlsResponse>( service, METHODID_LIST_CONTROLS))) .build(); } private abstract static class ControlServiceBaseDescriptorSupplier implements 
io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { ControlServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.retail.v2beta.ControlServiceProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("ControlService"); } } private static final class ControlServiceFileDescriptorSupplier extends ControlServiceBaseDescriptorSupplier { ControlServiceFileDescriptorSupplier() {} } private static final class ControlServiceMethodDescriptorSupplier extends ControlServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; ControlServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (ControlServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new ControlServiceFileDescriptorSupplier()) .addMethod(getCreateControlMethod()) .addMethod(getDeleteControlMethod()) .addMethod(getUpdateControlMethod()) .addMethod(getGetControlMethod()) .addMethod(getListControlsMethod()) .build(); } } } return result; } }
googleapis/google-cloud-java
37,973
java-dms/google-cloud-dms/src/test/java/com/google/cloud/clouddms/v1/MockDataMigrationServiceImpl.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.clouddms.v1; import com.google.api.core.BetaApi; import com.google.cloud.clouddms.v1.DataMigrationServiceGrpc.DataMigrationServiceImplBase; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Empty; import io.grpc.stub.StreamObserver; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Queue; import javax.annotation.Generated; @BetaApi @Generated("by gapic-generator-java") public class MockDataMigrationServiceImpl extends DataMigrationServiceImplBase { private List<AbstractMessage> requests; private Queue<Object> responses; public MockDataMigrationServiceImpl() { requests = new ArrayList<>(); responses = new LinkedList<>(); } public List<AbstractMessage> getRequests() { return requests; } public void addResponse(AbstractMessage response) { responses.add(response); } public void setResponses(List<AbstractMessage> responses) { this.responses = new LinkedList<Object>(responses); } public void addException(Exception exception) { responses.add(exception); } public void reset() { requests = new ArrayList<>(); responses = new LinkedList<>(); } @Override public void listMigrationJobs( ListMigrationJobsRequest request, StreamObserver<ListMigrationJobsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListMigrationJobsResponse) { 
requests.add(request); responseObserver.onNext(((ListMigrationJobsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListMigrationJobs, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListMigrationJobsResponse.class.getName(), Exception.class.getName()))); } } @Override public void getMigrationJob( GetMigrationJobRequest request, StreamObserver<MigrationJob> responseObserver) { Object response = responses.poll(); if (response instanceof MigrationJob) { requests.add(request); responseObserver.onNext(((MigrationJob) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetMigrationJob, expected %s or %s", response == null ? "null" : response.getClass().getName(), MigrationJob.class.getName(), Exception.class.getName()))); } } @Override public void createMigrationJob( CreateMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateMigrationJob, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateMigrationJob( UpdateMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateMigrationJob, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteMigrationJob( DeleteMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteMigrationJob, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void startMigrationJob( StartMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method StartMigrationJob, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void stopMigrationJob( StopMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method StopMigrationJob, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void resumeMigrationJob( ResumeMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ResumeMigrationJob, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void promoteMigrationJob( PromoteMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method PromoteMigrationJob, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void verifyMigrationJob( VerifyMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method VerifyMigrationJob, expected %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void restartMigrationJob( RestartMigrationJobRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method RestartMigrationJob, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void generateSshScript( GenerateSshScriptRequest request, StreamObserver<SshScript> responseObserver) { Object response = responses.poll(); if (response instanceof SshScript) { requests.add(request); responseObserver.onNext(((SshScript) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GenerateSshScript, expected %s or %s", response == null ? "null" : response.getClass().getName(), SshScript.class.getName(), Exception.class.getName()))); } } @Override public void generateTcpProxyScript( GenerateTcpProxyScriptRequest request, StreamObserver<TcpProxyScript> responseObserver) { Object response = responses.poll(); if (response instanceof TcpProxyScript) { requests.add(request); responseObserver.onNext(((TcpProxyScript) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GenerateTcpProxyScript, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), TcpProxyScript.class.getName(), Exception.class.getName()))); } } @Override public void listConnectionProfiles( ListConnectionProfilesRequest request, StreamObserver<ListConnectionProfilesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListConnectionProfilesResponse) { requests.add(request); responseObserver.onNext(((ListConnectionProfilesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListConnectionProfiles, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), ListConnectionProfilesResponse.class.getName(), Exception.class.getName()))); } } @Override public void getConnectionProfile( GetConnectionProfileRequest request, StreamObserver<ConnectionProfile> responseObserver) { Object response = responses.poll(); if (response instanceof ConnectionProfile) { requests.add(request); responseObserver.onNext(((ConnectionProfile) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetConnectionProfile, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), ConnectionProfile.class.getName(), Exception.class.getName()))); } } @Override public void createConnectionProfile( CreateConnectionProfileRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateConnectionProfile, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateConnectionProfile( UpdateConnectionProfileRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateConnectionProfile, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteConnectionProfile( DeleteConnectionProfileRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteConnectionProfile, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void createPrivateConnection( CreatePrivateConnectionRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreatePrivateConnection, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getPrivateConnection( GetPrivateConnectionRequest request, StreamObserver<PrivateConnection> responseObserver) { Object response = responses.poll(); if (response instanceof PrivateConnection) { requests.add(request); responseObserver.onNext(((PrivateConnection) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetPrivateConnection, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), PrivateConnection.class.getName(), Exception.class.getName()))); } } @Override public void listPrivateConnections( ListPrivateConnectionsRequest request, StreamObserver<ListPrivateConnectionsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListPrivateConnectionsResponse) { requests.add(request); responseObserver.onNext(((ListPrivateConnectionsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListPrivateConnections, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), ListPrivateConnectionsResponse.class.getName(), Exception.class.getName()))); } } @Override public void deletePrivateConnection( DeletePrivateConnectionRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeletePrivateConnection, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void getConversionWorkspace( GetConversionWorkspaceRequest request, StreamObserver<ConversionWorkspace> responseObserver) { Object response = responses.poll(); if (response instanceof ConversionWorkspace) { requests.add(request); responseObserver.onNext(((ConversionWorkspace) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetConversionWorkspace, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), ConversionWorkspace.class.getName(), Exception.class.getName()))); } } @Override public void listConversionWorkspaces( ListConversionWorkspacesRequest request, StreamObserver<ListConversionWorkspacesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListConversionWorkspacesResponse) { requests.add(request); responseObserver.onNext(((ListConversionWorkspacesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListConversionWorkspaces, expected %s" + " or %s", response == null ? "null" : response.getClass().getName(), ListConversionWorkspacesResponse.class.getName(), Exception.class.getName()))); } } @Override public void createConversionWorkspace( CreateConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateConversionWorkspace, expected %s" + " or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void updateConversionWorkspace( UpdateConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method UpdateConversionWorkspace, expected %s" + " or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void deleteConversionWorkspace( DeleteConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteConversionWorkspace, expected %s" + " or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void createMappingRule( CreateMappingRuleRequest request, StreamObserver<MappingRule> responseObserver) { Object response = responses.poll(); if (response instanceof MappingRule) { requests.add(request); responseObserver.onNext(((MappingRule) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CreateMappingRule, expected %s or %s", response == null ? "null" : response.getClass().getName(), MappingRule.class.getName(), Exception.class.getName()))); } } @Override public void deleteMappingRule( DeleteMappingRuleRequest request, StreamObserver<Empty> responseObserver) { Object response = responses.poll(); if (response instanceof Empty) { requests.add(request); responseObserver.onNext(((Empty) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DeleteMappingRule, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Empty.class.getName(), Exception.class.getName()))); } } @Override public void listMappingRules( ListMappingRulesRequest request, StreamObserver<ListMappingRulesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof ListMappingRulesResponse) { requests.add(request); responseObserver.onNext(((ListMappingRulesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ListMappingRules, expected %s or %s", response == null ? "null" : response.getClass().getName(), ListMappingRulesResponse.class.getName(), Exception.class.getName()))); } } @Override public void getMappingRule( GetMappingRuleRequest request, StreamObserver<MappingRule> responseObserver) { Object response = responses.poll(); if (response instanceof MappingRule) { requests.add(request); responseObserver.onNext(((MappingRule) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method GetMappingRule, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), MappingRule.class.getName(), Exception.class.getName()))); } } @Override public void seedConversionWorkspace( SeedConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method SeedConversionWorkspace, expected %s or" + " %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void importMappingRules( ImportMappingRulesRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ImportMappingRules, expected %s or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void convertConversionWorkspace( ConvertConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ConvertConversionWorkspace, expected %s" + " or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void commitConversionWorkspace( CommitConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method CommitConversionWorkspace, expected %s" + " or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void rollbackConversionWorkspace( RollbackConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method RollbackConversionWorkspace, expected" + " %s or %s", response == null ? "null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void applyConversionWorkspace( ApplyConversionWorkspaceRequest request, StreamObserver<Operation> responseObserver) { Object response = responses.poll(); if (response instanceof Operation) { requests.add(request); responseObserver.onNext(((Operation) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method ApplyConversionWorkspace, expected %s" + " or %s", response == null ? 
"null" : response.getClass().getName(), Operation.class.getName(), Exception.class.getName()))); } } @Override public void describeDatabaseEntities( DescribeDatabaseEntitiesRequest request, StreamObserver<DescribeDatabaseEntitiesResponse> responseObserver) { Object response = responses.poll(); if (response instanceof DescribeDatabaseEntitiesResponse) { requests.add(request); responseObserver.onNext(((DescribeDatabaseEntitiesResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DescribeDatabaseEntities, expected %s" + " or %s", response == null ? "null" : response.getClass().getName(), DescribeDatabaseEntitiesResponse.class.getName(), Exception.class.getName()))); } } @Override public void searchBackgroundJobs( SearchBackgroundJobsRequest request, StreamObserver<SearchBackgroundJobsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof SearchBackgroundJobsResponse) { requests.add(request); responseObserver.onNext(((SearchBackgroundJobsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method SearchBackgroundJobs, expected %s or" + " %s", response == null ? 
"null" : response.getClass().getName(), SearchBackgroundJobsResponse.class.getName(), Exception.class.getName()))); } } @Override public void describeConversionWorkspaceRevisions( DescribeConversionWorkspaceRevisionsRequest request, StreamObserver<DescribeConversionWorkspaceRevisionsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof DescribeConversionWorkspaceRevisionsResponse) { requests.add(request); responseObserver.onNext(((DescribeConversionWorkspaceRevisionsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method DescribeConversionWorkspaceRevisions," + " expected %s or %s", response == null ? "null" : response.getClass().getName(), DescribeConversionWorkspaceRevisionsResponse.class.getName(), Exception.class.getName()))); } } @Override public void fetchStaticIps( FetchStaticIpsRequest request, StreamObserver<FetchStaticIpsResponse> responseObserver) { Object response = responses.poll(); if (response instanceof FetchStaticIpsResponse) { requests.add(request); responseObserver.onNext(((FetchStaticIpsResponse) response)); responseObserver.onCompleted(); } else if (response instanceof Exception) { responseObserver.onError(((Exception) response)); } else { responseObserver.onError( new IllegalArgumentException( String.format( "Unrecognized response type %s for method FetchStaticIps, expected %s or %s", response == null ? "null" : response.getClass().getName(), FetchStaticIpsResponse.class.getName(), Exception.class.getName()))); } } }
googleapis/google-cloud-java
37,902
java-dataplex/google-cloud-dataplex/src/test/java/com/google/cloud/dataplex/v1/DataScanServiceClientTest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dataplex.v1; import static com.google.cloud.dataplex.v1.DataScanServiceClient.ListDataScanJobsPagedResponse; import static com.google.cloud.dataplex.v1.DataScanServiceClient.ListDataScansPagedResponse; import static com.google.cloud.dataplex.v1.DataScanServiceClient.ListLocationsPagedResponse; import com.google.api.gax.core.NoCredentialsProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.testing.LocalChannelProvider; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.api.gax.grpc.testing.MockServiceHelper; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.InvalidArgumentException; import com.google.api.gax.rpc.StatusCode; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.AbstractMessage; import com.google.protobuf.Any; import com.google.protobuf.Empty; import com.google.protobuf.FieldMask; import com.google.protobuf.Timestamp; import io.grpc.StatusRuntimeException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.UUID; 
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Generated unit tests for {@code DataScanServiceClient}.
 *
 * <p>Each RPC is exercised against in-process gRPC mocks ({@code MockDataScanService},
 * {@code MockLocations}, {@code MockIAMPolicy}) via a {@code MockServiceHelper}. The common
 * pattern per test: enqueue a canned response (or exception) on the mock, invoke the client
 * method, then verify the returned value, the single captured request's fields, and that the
 * GAX API-client header was sent. Long-running methods ({@code create/update/deleteDataScan})
 * enqueue a completed {@code Operation} and unwrap it with {@code .get()}.
 *
 * <p>NOTE: this file is produced by gapic-generator-java; do not hand-edit test bodies.
 */
@Generated("by gapic-generator-java")
public class DataScanServiceClientTest {
  // Shared in-process mocks; started once per class, reset before each test.
  private static MockDataScanService mockDataScanService;
  private static MockIAMPolicy mockIAMPolicy;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test channel provider and client, rebuilt in setUp().
  private LocalChannelProvider channelProvider;
  private DataScanServiceClient client;

  /** Starts the shared in-process gRPC server hosting all three mock services. */
  @BeforeClass
  public static void startStaticServer() {
    mockDataScanService = new MockDataScanService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockDataScanService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }

  /** Stops the shared mock server after all tests in the class have run. */
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  /** Resets mock state and creates a fresh client bound to the local channel, no credentials. */
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    DataScanServiceSettings settings =
        DataScanServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DataScanServiceClient.create(settings);
  }

  /** Releases the per-test client. */
  @After
  public void tearDown() throws Exception {
    client.close();
  }

  // --- createDataScan (LRO): resource-name parent overload ---
  @Test
  public void createDataScanTest() throws Exception {
    DataScan expectedResponse =
        DataScan.newBuilder()
            .setName(DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]").toString())
            .setUid("uid115792")
            .setDescription("description-1724546052")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setState(State.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setData(DataSource.newBuilder().build())
            .setExecutionSpec(DataScan.ExecutionSpec.newBuilder().build())
            .setExecutionStatus(DataScan.ExecutionStatus.newBuilder().build())
            .setType(DataScanType.forNumber(0))
            .build();
    // LRO methods return an Operation that is already done, wrapping the expected resource.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDataScanTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDataScanService.addResponse(resultOperation);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    DataScan dataScan = DataScan.newBuilder().build();
    String dataScanId = "dataScanId1260787906";

    DataScan actualResponse = client.createDataScanAsync(parent, dataScan, dataScanId).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    // Verify exactly one request was captured and its fields match what was passed.
    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDataScanRequest actualRequest = ((CreateDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(dataScan, actualRequest.getDataScan());
    Assert.assertEquals(dataScanId, actualRequest.getDataScanId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void createDataScanExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      DataScan dataScan = DataScan.newBuilder().build();
      String dataScanId = "dataScanId1260787906";
      client.createDataScanAsync(parent, dataScan, dataScanId).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // The async future wraps the translated gRPC status in an ExecutionException.
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- createDataScan (LRO): plain-string parent overload ---
  @Test
  public void createDataScanTest2() throws Exception {
    DataScan expectedResponse =
        DataScan.newBuilder()
            .setName(DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]").toString())
            .setUid("uid115792")
            .setDescription("description-1724546052")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setState(State.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setData(DataSource.newBuilder().build())
            .setExecutionSpec(DataScan.ExecutionSpec.newBuilder().build())
            .setExecutionStatus(DataScan.ExecutionStatus.newBuilder().build())
            .setType(DataScanType.forNumber(0))
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDataScanTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDataScanService.addResponse(resultOperation);

    String parent = "parent-995424086";
    DataScan dataScan = DataScan.newBuilder().build();
    String dataScanId = "dataScanId1260787906";

    DataScan actualResponse = client.createDataScanAsync(parent, dataScan, dataScanId).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDataScanRequest actualRequest = ((CreateDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(dataScan, actualRequest.getDataScan());
    Assert.assertEquals(dataScanId, actualRequest.getDataScanId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void createDataScanExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String parent = "parent-995424086";
      DataScan dataScan = DataScan.newBuilder().build();
      String dataScanId = "dataScanId1260787906";
      client.createDataScanAsync(parent, dataScan, dataScanId).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- updateDataScan (LRO) ---
  @Test
  public void updateDataScanTest() throws Exception {
    DataScan expectedResponse =
        DataScan.newBuilder()
            .setName(DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]").toString())
            .setUid("uid115792")
            .setDescription("description-1724546052")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setState(State.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setData(DataSource.newBuilder().build())
            .setExecutionSpec(DataScan.ExecutionSpec.newBuilder().build())
            .setExecutionStatus(DataScan.ExecutionStatus.newBuilder().build())
            .setType(DataScanType.forNumber(0))
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateDataScanTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDataScanService.addResponse(resultOperation);

    DataScan dataScan = DataScan.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();

    DataScan actualResponse = client.updateDataScanAsync(dataScan, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateDataScanRequest actualRequest = ((UpdateDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(dataScan, actualRequest.getDataScan());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void updateDataScanExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      DataScan dataScan = DataScan.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateDataScanAsync(dataScan, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- deleteDataScan (LRO): resource-name overload ---
  @Test
  public void deleteDataScanTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDataScanTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDataScanService.addResponse(resultOperation);

    DataScanName name = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");

    client.deleteDataScanAsync(name).get();

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDataScanRequest actualRequest = ((DeleteDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteDataScanExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      DataScanName name = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");
      client.deleteDataScanAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- deleteDataScan (LRO): plain-string overload ---
  @Test
  public void deleteDataScanTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDataScanTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDataScanService.addResponse(resultOperation);

    String name = "name3373707";

    client.deleteDataScanAsync(name).get();

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDataScanRequest actualRequest = ((DeleteDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void deleteDataScanExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String name = "name3373707";
      client.deleteDataScanAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // --- getDataScan (unary): resource-name overload ---
  @Test
  public void getDataScanTest() throws Exception {
    DataScan expectedResponse =
        DataScan.newBuilder()
            .setName(DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]").toString())
            .setUid("uid115792")
            .setDescription("description-1724546052")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setState(State.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setData(DataSource.newBuilder().build())
            .setExecutionSpec(DataScan.ExecutionSpec.newBuilder().build())
            .setExecutionStatus(DataScan.ExecutionStatus.newBuilder().build())
            .setType(DataScanType.forNumber(0))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    DataScanName name = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");

    DataScan actualResponse = client.getDataScan(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDataScanRequest actualRequest = ((GetDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getDataScanExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      DataScanName name = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");
      client.getDataScan(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- getDataScan (unary): plain-string overload ---
  @Test
  public void getDataScanTest2() throws Exception {
    DataScan expectedResponse =
        DataScan.newBuilder()
            .setName(DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]").toString())
            .setUid("uid115792")
            .setDescription("description-1724546052")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setState(State.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setData(DataSource.newBuilder().build())
            .setExecutionSpec(DataScan.ExecutionSpec.newBuilder().build())
            .setExecutionStatus(DataScan.ExecutionStatus.newBuilder().build())
            .setType(DataScanType.forNumber(0))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    String name = "name3373707";

    DataScan actualResponse = client.getDataScan(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDataScanRequest actualRequest = ((GetDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getDataScanExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String name = "name3373707";
      client.getDataScan(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- listDataScans (paged): resource-name overload ---
  @Test
  public void listDataScansTest() throws Exception {
    DataScan responsesElement = DataScan.newBuilder().build();
    // Empty next_page_token marks this as the only page.
    ListDataScansResponse expectedResponse =
        ListDataScansResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataScans(Arrays.asList(responsesElement))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");

    ListDataScansPagedResponse pagedListResponse = client.listDataScans(parent);

    List<DataScan> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataScansList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDataScansRequest actualRequest = ((ListDataScansRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listDataScansExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listDataScans(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- listDataScans (paged): plain-string overload ---
  @Test
  public void listDataScansTest2() throws Exception {
    DataScan responsesElement = DataScan.newBuilder().build();
    ListDataScansResponse expectedResponse =
        ListDataScansResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataScans(Arrays.asList(responsesElement))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListDataScansPagedResponse pagedListResponse = client.listDataScans(parent);

    List<DataScan> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataScansList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDataScansRequest actualRequest = ((ListDataScansRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listDataScansExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listDataScans(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- runDataScan (unary): resource-name overload ---
  @Test
  public void runDataScanTest() throws Exception {
    RunDataScanResponse expectedResponse =
        RunDataScanResponse.newBuilder().setJob(DataScanJob.newBuilder().build()).build();
    mockDataScanService.addResponse(expectedResponse);

    DataScanName name = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");

    RunDataScanResponse actualResponse = client.runDataScan(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    RunDataScanRequest actualRequest = ((RunDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void runDataScanExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      DataScanName name = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");
      client.runDataScan(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- runDataScan (unary): plain-string overload ---
  @Test
  public void runDataScanTest2() throws Exception {
    RunDataScanResponse expectedResponse =
        RunDataScanResponse.newBuilder().setJob(DataScanJob.newBuilder().build()).build();
    mockDataScanService.addResponse(expectedResponse);

    String name = "name3373707";

    RunDataScanResponse actualResponse = client.runDataScan(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    RunDataScanRequest actualRequest = ((RunDataScanRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void runDataScanExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String name = "name3373707";
      client.runDataScan(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- getDataScanJob (unary): resource-name overload ---
  @Test
  public void getDataScanJobTest() throws Exception {
    DataScanJob expectedResponse =
        DataScanJob.newBuilder()
            .setName(
                DataScanJobName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]", "[JOB]").toString())
            .setUid("uid115792")
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setMessage("message954925063")
            .setType(DataScanType.forNumber(0))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    DataScanJobName name = DataScanJobName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]", "[JOB]");

    DataScanJob actualResponse = client.getDataScanJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDataScanJobRequest actualRequest = ((GetDataScanJobRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getDataScanJobExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      DataScanJobName name = DataScanJobName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]", "[JOB]");
      client.getDataScanJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- getDataScanJob (unary): plain-string overload ---
  @Test
  public void getDataScanJobTest2() throws Exception {
    DataScanJob expectedResponse =
        DataScanJob.newBuilder()
            .setName(
                DataScanJobName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]", "[JOB]").toString())
            .setUid("uid115792")
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setMessage("message954925063")
            .setType(DataScanType.forNumber(0))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    String name = "name3373707";

    DataScanJob actualResponse = client.getDataScanJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDataScanJobRequest actualRequest = ((GetDataScanJobRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getDataScanJobExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String name = "name3373707";
      client.getDataScanJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- listDataScanJobs (paged): resource-name overload ---
  @Test
  public void listDataScanJobsTest() throws Exception {
    DataScanJob responsesElement = DataScanJob.newBuilder().build();
    ListDataScanJobsResponse expectedResponse =
        ListDataScanJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataScanJobs(Arrays.asList(responsesElement))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    DataScanName parent = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");

    ListDataScanJobsPagedResponse pagedListResponse = client.listDataScanJobs(parent);

    List<DataScanJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataScanJobsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDataScanJobsRequest actualRequest = ((ListDataScanJobsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listDataScanJobsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      DataScanName parent = DataScanName.of("[PROJECT]", "[LOCATION]", "[DATASCAN]");
      client.listDataScanJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- listDataScanJobs (paged): plain-string overload ---
  @Test
  public void listDataScanJobsTest2() throws Exception {
    DataScanJob responsesElement = DataScanJob.newBuilder().build();
    ListDataScanJobsResponse expectedResponse =
        ListDataScanJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDataScanJobs(Arrays.asList(responsesElement))
            .build();
    mockDataScanService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListDataScanJobsPagedResponse pagedListResponse = client.listDataScanJobs(parent);

    List<DataScanJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDataScanJobsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDataScanJobsRequest actualRequest = ((ListDataScanJobsRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listDataScanJobsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listDataScanJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- generateDataQualityRules (unary) ---
  @Test
  public void generateDataQualityRulesTest() throws Exception {
    GenerateDataQualityRulesResponse expectedResponse =
        GenerateDataQualityRulesResponse.newBuilder()
            .addAllRule(new ArrayList<DataQualityRule>())
            .build();
    mockDataScanService.addResponse(expectedResponse);

    String name = "name3373707";

    GenerateDataQualityRulesResponse actualResponse = client.generateDataQualityRules(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDataScanService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GenerateDataQualityRulesRequest actualRequest =
        ((GenerateDataQualityRulesRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void generateDataQualityRulesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDataScanService.addException(exception);

    try {
      String name = "name3373707";
      client.generateDataQualityRules(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- Locations mixin: listLocations (paged) ---
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);

    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();

    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);

    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);

    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // --- Locations mixin: getLocation (unary) ---
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);

    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();

    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);

    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
googleapis/google-cloud-java
37,939
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListSearchHypernymsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/warehouse.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Response message for listing SearchHypernyms. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListSearchHypernymsResponse} */ public final class ListSearchHypernymsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListSearchHypernymsResponse) ListSearchHypernymsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSearchHypernymsResponse.newBuilder() to construct. 
private ListSearchHypernymsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSearchHypernymsResponse() { searchHypernyms_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSearchHypernymsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListSearchHypernymsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListSearchHypernymsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListSearchHypernymsResponse.class, com.google.cloud.visionai.v1.ListSearchHypernymsResponse.Builder.class); } public static final int SEARCH_HYPERNYMS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.visionai.v1.SearchHypernym> searchHypernyms_; /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.visionai.v1.SearchHypernym> getSearchHypernymsList() { return searchHypernyms_; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.visionai.v1.SearchHypernymOrBuilder> getSearchHypernymsOrBuilderList() { return searchHypernyms_; } /** * * * <pre> * The SearchHypernyms from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ @java.lang.Override public int getSearchHypernymsCount() { return searchHypernyms_.size(); } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.SearchHypernym getSearchHypernyms(int index) { return searchHypernyms_.get(index); } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.SearchHypernymOrBuilder getSearchHypernymsOrBuilder( int index) { return searchHypernyms_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < searchHypernyms_.size(); i++) { output.writeMessage(1, searchHypernyms_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < searchHypernyms_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, searchHypernyms_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListSearchHypernymsResponse)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListSearchHypernymsResponse other = (com.google.cloud.visionai.v1.ListSearchHypernymsResponse) obj; if (!getSearchHypernymsList().equals(other.getSearchHypernymsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSearchHypernymsCount() > 0) { hash = (37 * hash) + SEARCH_HYPERNYMS_FIELD_NUMBER; hash = (53 * hash) + getSearchHypernymsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.visionai.v1.ListSearchHypernymsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for listing SearchHypernyms. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListSearchHypernymsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListSearchHypernymsResponse) com.google.cloud.visionai.v1.ListSearchHypernymsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListSearchHypernymsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListSearchHypernymsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListSearchHypernymsResponse.class, com.google.cloud.visionai.v1.ListSearchHypernymsResponse.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListSearchHypernymsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (searchHypernymsBuilder_ == null) { searchHypernyms_ = java.util.Collections.emptyList(); } else { searchHypernyms_ = null; searchHypernymsBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListSearchHypernymsResponse_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ListSearchHypernymsResponse getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ListSearchHypernymsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListSearchHypernymsResponse build() { com.google.cloud.visionai.v1.ListSearchHypernymsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ListSearchHypernymsResponse buildPartial() { com.google.cloud.visionai.v1.ListSearchHypernymsResponse result = new com.google.cloud.visionai.v1.ListSearchHypernymsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.visionai.v1.ListSearchHypernymsResponse result) { if (searchHypernymsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { searchHypernyms_ = java.util.Collections.unmodifiableList(searchHypernyms_); bitField0_ = (bitField0_ & ~0x00000001); } result.searchHypernyms_ = searchHypernyms_; } else { result.searchHypernyms_ = searchHypernymsBuilder_.build(); } } private void buildPartial0(com.google.cloud.visionai.v1.ListSearchHypernymsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override 
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ListSearchHypernymsResponse) { return mergeFrom((com.google.cloud.visionai.v1.ListSearchHypernymsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.ListSearchHypernymsResponse other) { if (other == com.google.cloud.visionai.v1.ListSearchHypernymsResponse.getDefaultInstance()) return this; if (searchHypernymsBuilder_ == null) { if (!other.searchHypernyms_.isEmpty()) { if (searchHypernyms_.isEmpty()) { searchHypernyms_ = other.searchHypernyms_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSearchHypernymsIsMutable(); searchHypernyms_.addAll(other.searchHypernyms_); } onChanged(); } } else { if (!other.searchHypernyms_.isEmpty()) { if (searchHypernymsBuilder_.isEmpty()) { searchHypernymsBuilder_.dispose(); searchHypernymsBuilder_ = null; searchHypernyms_ = other.searchHypernyms_; bitField0_ = (bitField0_ & ~0x00000001); searchHypernymsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSearchHypernymsFieldBuilder() : null; } else { searchHypernymsBuilder_.addAllMessages(other.searchHypernyms_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.visionai.v1.SearchHypernym m = input.readMessage( com.google.cloud.visionai.v1.SearchHypernym.parser(), extensionRegistry); if (searchHypernymsBuilder_ == null) { ensureSearchHypernymsIsMutable(); searchHypernyms_.add(m); } else { searchHypernymsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.visionai.v1.SearchHypernym> searchHypernyms_ = java.util.Collections.emptyList(); private void ensureSearchHypernymsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { searchHypernyms_ = new java.util.ArrayList<com.google.cloud.visionai.v1.SearchHypernym>(searchHypernyms_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.SearchHypernym, 
com.google.cloud.visionai.v1.SearchHypernym.Builder, com.google.cloud.visionai.v1.SearchHypernymOrBuilder> searchHypernymsBuilder_; /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.SearchHypernym> getSearchHypernymsList() { if (searchHypernymsBuilder_ == null) { return java.util.Collections.unmodifiableList(searchHypernyms_); } else { return searchHypernymsBuilder_.getMessageList(); } } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public int getSearchHypernymsCount() { if (searchHypernymsBuilder_ == null) { return searchHypernyms_.size(); } else { return searchHypernymsBuilder_.getCount(); } } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public com.google.cloud.visionai.v1.SearchHypernym getSearchHypernyms(int index) { if (searchHypernymsBuilder_ == null) { return searchHypernyms_.get(index); } else { return searchHypernymsBuilder_.getMessage(index); } } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder setSearchHypernyms( int index, com.google.cloud.visionai.v1.SearchHypernym value) { if (searchHypernymsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSearchHypernymsIsMutable(); searchHypernyms_.set(index, value); onChanged(); } else { searchHypernymsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder setSearchHypernyms( int index, com.google.cloud.visionai.v1.SearchHypernym.Builder builderForValue) { if (searchHypernymsBuilder_ == null) { ensureSearchHypernymsIsMutable(); searchHypernyms_.set(index, builderForValue.build()); onChanged(); } else { searchHypernymsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder addSearchHypernyms(com.google.cloud.visionai.v1.SearchHypernym value) { if (searchHypernymsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSearchHypernymsIsMutable(); searchHypernyms_.add(value); onChanged(); } else { searchHypernymsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder addSearchHypernyms( int index, com.google.cloud.visionai.v1.SearchHypernym value) { if (searchHypernymsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSearchHypernymsIsMutable(); searchHypernyms_.add(index, value); onChanged(); } else { searchHypernymsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder addSearchHypernyms( com.google.cloud.visionai.v1.SearchHypernym.Builder builderForValue) { if (searchHypernymsBuilder_ == null) { ensureSearchHypernymsIsMutable(); searchHypernyms_.add(builderForValue.build()); onChanged(); } else { searchHypernymsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder addSearchHypernyms( int index, com.google.cloud.visionai.v1.SearchHypernym.Builder builderForValue) { if (searchHypernymsBuilder_ == null) { ensureSearchHypernymsIsMutable(); searchHypernyms_.add(index, builderForValue.build()); onChanged(); } else { searchHypernymsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder addAllSearchHypernyms( java.lang.Iterable<? extends com.google.cloud.visionai.v1.SearchHypernym> values) { if (searchHypernymsBuilder_ == null) { ensureSearchHypernymsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, searchHypernyms_); onChanged(); } else { searchHypernymsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder clearSearchHypernyms() { if (searchHypernymsBuilder_ == null) { searchHypernyms_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { searchHypernymsBuilder_.clear(); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public Builder removeSearchHypernyms(int index) { if (searchHypernymsBuilder_ == null) { ensureSearchHypernymsIsMutable(); searchHypernyms_.remove(index); onChanged(); } else { searchHypernymsBuilder_.remove(index); } return this; } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public com.google.cloud.visionai.v1.SearchHypernym.Builder getSearchHypernymsBuilder( int index) { return getSearchHypernymsFieldBuilder().getBuilder(index); } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public com.google.cloud.visionai.v1.SearchHypernymOrBuilder getSearchHypernymsOrBuilder( int index) { if (searchHypernymsBuilder_ == null) { return searchHypernyms_.get(index); } else { return searchHypernymsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public java.util.List<? extends com.google.cloud.visionai.v1.SearchHypernymOrBuilder> getSearchHypernymsOrBuilderList() { if (searchHypernymsBuilder_ != null) { return searchHypernymsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(searchHypernyms_); } } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public com.google.cloud.visionai.v1.SearchHypernym.Builder addSearchHypernymsBuilder() { return getSearchHypernymsFieldBuilder() .addBuilder(com.google.cloud.visionai.v1.SearchHypernym.getDefaultInstance()); } /** * * * <pre> * The SearchHypernyms from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public com.google.cloud.visionai.v1.SearchHypernym.Builder addSearchHypernymsBuilder( int index) { return getSearchHypernymsFieldBuilder() .addBuilder(index, com.google.cloud.visionai.v1.SearchHypernym.getDefaultInstance()); } /** * * * <pre> * The SearchHypernyms from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.SearchHypernym search_hypernyms = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.SearchHypernym.Builder> getSearchHypernymsBuilderList() { return getSearchHypernymsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.SearchHypernym, com.google.cloud.visionai.v1.SearchHypernym.Builder, com.google.cloud.visionai.v1.SearchHypernymOrBuilder> getSearchHypernymsFieldBuilder() { if (searchHypernymsBuilder_ == null) { searchHypernymsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.SearchHypernym, com.google.cloud.visionai.v1.SearchHypernym.Builder, com.google.cloud.visionai.v1.SearchHypernymOrBuilder>( searchHypernyms_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); searchHypernyms_ = null; } return searchHypernymsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListSearchHypernymsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListSearchHypernymsResponse) private static final com.google.cloud.visionai.v1.ListSearchHypernymsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListSearchHypernymsResponse(); } public static com.google.cloud.visionai.v1.ListSearchHypernymsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSearchHypernymsResponse> PARSER = new com.google.protobuf.AbstractParser<ListSearchHypernymsResponse>() { @java.lang.Override public ListSearchHypernymsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListSearchHypernymsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSearchHypernymsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ListSearchHypernymsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/truth
38,239
extensions/proto/src/main/java/com/google/common/truth/extensions/proto/MapWithProtoValuesSubject.java
/* * Copyright (c) 2016 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.truth.extensions.proto; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.collect.Lists.asList; import static com.google.common.truth.extensions.proto.FieldScopeUtil.asList; import com.google.common.truth.FailureMetadata; import com.google.common.truth.MapSubject; import com.google.common.truth.Ordered; import com.google.errorprone.annotations.CanIgnoreReturnValue; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.ExtensionRegistry; import com.google.protobuf.Message; import com.google.protobuf.TypeRegistry; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import org.jspecify.annotations.Nullable; /** * Truth subject for maps with protocol buffers for values. * * <p>{@code ProtoTruth.assertThat(actual).containsExactlyEntriesIn(expected)} performs the same * assertion as {@code Truth.assertThat(actual).containsExactlyEntriesIn(expected)}. By default, the * assertions are strict with respect to repeated field order, missing fields, etc. This behavior * can be changed with the configuration methods on this subject, e.g. {@code * ProtoTruth.assertThat(actual).ignoringRepeatedFieldOrder().containsExactly(expected)}. 
* * <p>By default, floating-point fields are compared using exact equality, which is <a * href="https://truth.dev/floating_point">probably not what you want</a> if the values are the * results of some arithmetic. To check for approximate equality, use {@link * #usingDoubleToleranceForValues}, {@link #usingFloatToleranceForValues}, and {@linkplain * #usingDoubleToleranceForFieldsForValues(double, int, int...) their per-field equivalents}. * * <p>Equality tests, and other methods, may yield slightly different behavior for versions 2 and 3 * of Protocol Buffers. If testing protos of multiple versions, make sure you understand the * behaviors of default and unknown fields so you don't under or over test. * * @param <M> the type of the message values in the map */ public class MapWithProtoValuesSubject<M extends Message> extends MapSubject { /* * Storing a FailureMetadata instance in a Subject subclass is generally a bad practice. For an * explanation of why it works out OK here, see LiteProtoSubject. 
*/ private final FailureMetadata metadata; private final Map<?, M> actual; private final FluentEqualityConfig config; protected MapWithProtoValuesSubject(FailureMetadata failureMetadata, @Nullable Map<?, M> map) { this(failureMetadata, FluentEqualityConfig.defaultInstance(), map); } MapWithProtoValuesSubject( FailureMetadata failureMetadata, FluentEqualityConfig config, @Nullable Map<?, M> map) { super(failureMetadata, map); this.metadata = failureMetadata; this.actual = map; this.config = config; } ////////////////////////////////////////////////////////////////////////////////////////////////// // MapWithProtoValuesFluentAssertion Configuration ////////////////////////////////////////////////////////////////////////////////////////////////// MapWithProtoValuesFluentAssertion<M> usingConfig(FluentEqualityConfig newConfig) { return new MapWithProtoValuesFluentAssertionImpl<>( new MapWithProtoValuesSubject<>(metadata, newConfig, actual)); } /** * Specifies that the 'has' bit of individual fields should be ignored when comparing for * equality. * * <p>For version 2 Protocol Buffers, this setting determines whether two protos with the same * value for a field compare equal if one explicitly sets the value, and the other merely * implicitly uses the schema-defined default. This setting also determines whether unknown fields * should be considered in the comparison. By {@code ignoringFieldAbsence()}, unknown fields are * ignored, and value-equal fields as specified above are considered equal. * * <p>For version 3 Protocol Buffers, this setting does not affect primitive fields, because their * default value is indistinguishable from unset. */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceForValues() { return usingConfig(config.ignoringFieldAbsence()); } /** * Specifies that the 'has' bit of these explicitly specified top-level field numbers should be * ignored when comparing for equality. 
Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if they are to be ignored as well. * * <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsenceForValues() for details */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues( int firstFieldNumber, int... rest) { return usingConfig(config.ignoringFieldAbsenceOfFields(asList(firstFieldNumber, rest))); } /** * Specifies that the 'has' bit of these explicitly specified top-level field numbers should be * ignored when comparing for equality. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if they are to be ignored as well. * * <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsenceForValues() for details */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues( Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringFieldAbsenceOfFields(fieldNumbers)); } /** * Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored * when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored * as well. * * <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsenceForValues() for details */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.ignoringFieldAbsenceOfFieldDescriptors(asList(firstFieldDescriptor, rest))); } /** * Specifies that the 'has' bit of these explicitly specified field descriptors should be ignored * when comparing for equality. Sub-fields must be specified explicitly if they are to be ignored * as well. 
* * <p>Use {@link #ignoringFieldAbsenceForValues()} instead to ignore the 'has' bit for all fields. * * @see #ignoringFieldAbsenceForValues() for details */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.ignoringFieldAbsenceOfFieldDescriptors(fieldDescriptors)); } /** * Specifies that the ordering of repeated fields, at all levels, should be ignored when comparing * for equality. * * <p>This setting applies to all repeated fields recursively, but it does not ignore structure. * For example, with {@link #ignoringRepeatedFieldOrderForValues()}, a repeated {@code int32} * field {@code bar}, set inside a repeated message field {@code foo}, the following protos will * all compare equal: * * <pre>{@code * message1: { * foo: { * bar: 1 * bar: 2 * } * foo: { * bar: 3 * bar: 4 * } * } * * message2: { * foo: { * bar: 2 * bar: 1 * } * foo: { * bar: 4 * bar: 3 * } * } * * message3: { * foo: { * bar: 4 * bar: 3 * } * foo: { * bar: 2 * bar: 1 * } * } * }</pre> * * <p>However, the following message will compare equal to none of these: * * <pre>{@code * message4: { * foo: { * bar: 1 * bar: 3 * } * foo: { * bar: 2 * bar: 4 * } * } * }</pre> * * <p>This setting does not apply to map fields, for which field order is always ignored. The * serialization order of map fields is undefined, and it may change from runtime to runtime. */ public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderForValues() { return usingConfig(config.ignoringRepeatedFieldOrder()); } /** * Specifies that the ordering of repeated fields for these explicitly specified top-level field * numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly * (via {@link FieldDescriptor}) if their orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields. 
* * @see #ignoringRepeatedFieldOrderForValues() for details. */ public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues( int firstFieldNumber, int... rest) { return usingConfig(config.ignoringRepeatedFieldOrderOfFields(asList(firstFieldNumber, rest))); } /** * Specifies that the ordering of repeated fields for these explicitly specified top-level field * numbers should be ignored when comparing for equality. Sub-fields must be specified explicitly * (via {@link FieldDescriptor}) if their orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrderForValues() for details. */ public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues( Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringRepeatedFieldOrderOfFields(fieldNumbers)); } /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality. Sub-fields must be specified explicitly if their * orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrderForValues() for details. */ public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.ignoringRepeatedFieldOrderOfFieldDescriptors(asList(firstFieldDescriptor, rest))); } /** * Specifies that the ordering of repeated fields for these explicitly specified field descriptors * should be ignored when comparing for equality. Sub-fields must be specified explicitly if their * orders are to be ignored as well. * * <p>Use {@link #ignoringRepeatedFieldOrderForValues()} instead to ignore order for all fields. * * @see #ignoringRepeatedFieldOrderForValues() for details. 
*/ public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.ignoringRepeatedFieldOrderOfFieldDescriptors(fieldDescriptors)); } /** * Specifies that, for all repeated and map fields, any elements in the 'actual' proto which are * not found in the 'expected' proto are ignored, with the exception of fields in the expected * proto which are empty. To ignore empty repeated fields as well, use {@link * #comparingExpectedFieldsOnlyForValues}. * * <p>This rule is applied independently from {@link #ignoringRepeatedFieldOrderForValues}. If * ignoring repeated field order AND extra repeated field elements, all that is tested is that the * expected elements comprise a subset of the actual elements. If not ignoring repeated field * order, but still ignoring extra repeated field elements, the actual elements must contain a * subsequence that matches the expected elements for the test to pass. (The subsequence rule does * not apply to Map fields, which are always compared by key.) */ public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsForValues() { return usingConfig(config.ignoringExtraRepeatedFieldElements()); } /** * Specifies that extra repeated field elements for these explicitly specified top-level field * numbers should be ignored. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if their extra elements are to be ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all * fields. * * @see #ignoringExtraRepeatedFieldElementsForValues() for details. */ public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldsForValues( int firstFieldNumber, int... 
rest) { return usingConfig( config.ignoringExtraRepeatedFieldElementsOfFields(asList(firstFieldNumber, rest))); } /** * Specifies that extra repeated field elements for these explicitly specified top-level field * numbers should be ignored. Sub-fields must be specified explicitly (via {@link * FieldDescriptor}) if their extra elements are to be ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all * fields. * * @see #ignoringExtraRepeatedFieldElementsForValues() for details. */ public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldsForValues( Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringExtraRepeatedFieldElementsOfFields(fieldNumbers)); } /** * Specifies that extra repeated field elements for these explicitly specified field descriptors * should be ignored. Sub-fields must be specified explicitly if their extra elements are to be * ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all * fields. * * @see #ignoringExtraRepeatedFieldElementsForValues() for details. */ public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors( asList(firstFieldDescriptor, rest))); } /** * Specifies that extra repeated field elements for these explicitly specified field descriptors * should be ignored. Sub-fields must be specified explicitly if their extra elements are to be * ignored as well. * * <p>Use {@link #ignoringExtraRepeatedFieldElementsForValues()} instead to ignore these for all * fields. * * @see #ignoringExtraRepeatedFieldElementsForValues() for details. 
*/ public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig( config.ignoringExtraRepeatedFieldElementsOfFieldDescriptors(fieldDescriptors)); } /** * Compares double fields as equal if they are both finite and their absolute difference is less * than or equal to {@code tolerance}. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForValues(double tolerance) { return usingConfig(config.usingDoubleTolerance(tolerance)); } /** * Compares double fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues( double tolerance, int firstFieldNumber, int... rest) { return usingConfig( config.usingDoubleToleranceForFields(tolerance, asList(firstFieldNumber, rest))); } /** * Compares double fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues( double tolerance, Iterable<Integer> fieldNumbers) { return usingConfig(config.usingDoubleToleranceForFields(tolerance, fieldNumbers)); } /** * Compares double fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues( double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return usingConfig( config.usingDoubleToleranceForFieldDescriptors( tolerance, asList(firstFieldDescriptor, rest))); } /** * Compares double fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues( double tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.usingDoubleToleranceForFieldDescriptors(tolerance, fieldDescriptors)); } /** * Compares float fields as equal if they are both finite and their absolute difference is less * than or equal to {@code tolerance}. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForValues(float tolerance) { return usingConfig(config.usingFloatTolerance(tolerance)); } /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues( float tolerance, int firstFieldNumber, int... rest) { return usingConfig( config.usingFloatToleranceForFields(tolerance, asList(firstFieldNumber, rest))); } /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues( float tolerance, Iterable<Integer> fieldNumbers) { return usingConfig(config.usingFloatToleranceForFields(tolerance, fieldNumbers)); } /** * Compares float fields with these explicitly specified fields using the provided absolute * tolerance. * * @param tolerance A finite, non-negative tolerance. 
*/ public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues( float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return usingConfig( config.usingFloatToleranceForFieldDescriptors( tolerance, asList(firstFieldDescriptor, rest))); } /** * Compares float fields with these explicitly specified top-level field numbers using the * provided absolute tolerance. * * @param tolerance A finite, non-negative tolerance. */ public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues( float tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.usingFloatToleranceForFieldDescriptors(tolerance, fieldDescriptors)); } /** * Limits the comparison of Protocol buffers to the fields set in the expected proto(s). When * multiple protos are specified, the comparison is limited to the union of set fields in all the * expected protos. * * <p>The "expected proto(s)" are those passed to the method at the end of the call chain, such as * {@link #containsEntry} or {@link #containsExactlyEntriesIn}. * * <p>Fields not set in the expected proto(s) are ignored. In particular, proto3 fields which have * their default values are ignored, as these are indistinguishable from unset fields. If you want * to assert that a proto3 message has certain fields with default values, you cannot use this * method. */ public MapWithProtoValuesFluentAssertion<M> comparingExpectedFieldsOnlyForValues() { return usingConfig(config.comparingExpectedFieldsOnly()); } /** * Limits the comparison of Protocol buffers to the defined {@link FieldScope}. * * <p>This method is additive and has well-defined ordering semantics. 
If the invoking {@link * ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is * invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is * constrained to the intersection of {@link FieldScope}s {@code X} and {@code Y}. * * <p>By default, {@link MapWithProtoValuesFluentAssertion} is constrained to {@link * FieldScopes#all()}, that is, no fields are excluded from comparison. */ public MapWithProtoValuesFluentAssertion<M> withPartialScopeForValues(FieldScope fieldScope) { return usingConfig(config.withPartialScope(checkNotNull(fieldScope, "fieldScope"))); } /** * Excludes the top-level message fields with the given tag numbers from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers * ignored. * * <p>If an invalid field number is supplied, the terminal comparison operation will throw a * runtime exception. */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues( int firstFieldNumber, int... rest) { return ignoringFieldsForValues(asList(firstFieldNumber, rest)); } /** * Excludes the top-level message fields with the given tag numbers from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * numbers are ignored, and all sub-messages of type {@code M} will also have these field numbers * ignored. * * <p>If an invalid field number is supplied, the terminal comparison operation will throw a * runtime exception. 
*/ public MapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues( Iterable<Integer> fieldNumbers) { return usingConfig(config.ignoringFields(fieldNumbers)); } /** * Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * descriptors are ignored, no matter where they occur in the tree. * * <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it * is silently ignored. */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return ignoringFieldDescriptorsForValues(asList(firstFieldDescriptor, rest)); } /** * Excludes all message fields matching the given {@link FieldDescriptor}s from the comparison. * * <p>This method adds on any previous {@link FieldScope} related settings, overriding previous * changes to ensure the specified fields are ignored recursively. All sub-fields of these field * descriptors are ignored, no matter where they occur in the tree. * * <p>If a field descriptor which does not, or cannot occur in the proto structure is supplied, it * is silently ignored. */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return usingConfig(config.ignoringFieldDescriptors(fieldDescriptors)); } /** * Excludes all specific field paths under the argument {@link FieldScope} from the comparison. * * <p>This method is additive and has well-defined ordering semantics. If the invoking {@link * ProtoFluentAssertion} is already scoped to a {@link FieldScope} {@code X}, and this method is * invoked with {@link FieldScope} {@code Y}, the resultant {@link ProtoFluentAssertion} is * constrained to the subtraction of {@code X - Y}. 
* * <p>By default, {@link ProtoFluentAssertion} is constrained to {@link FieldScopes#all()}, that * is, no fields are excluded from comparison. */ public MapWithProtoValuesFluentAssertion<M> ignoringFieldScopeForValues(FieldScope fieldScope) { return usingConfig(config.ignoringFieldScope(checkNotNull(fieldScope, "fieldScope"))); } /** * If set, in the event of a comparison failure, the error message printed will list only those * specific fields that did not match between the actual and expected values. Useful for very * large protocol buffers. * * <p>This a purely cosmetic setting, and it has no effect on the behavior of the test. */ public MapWithProtoValuesFluentAssertion<M> reportingMismatchesOnlyForValues() { return usingConfig(config.reportingMismatchesOnly()); } /** * Specifies the {@link TypeRegistry} and {@link ExtensionRegistry} to use for {@link * com.google.protobuf.Any Any} messages. * * <p>To compare the value of an {@code Any} message, ProtoTruth looks in the given type registry * for a descriptor for the message's type URL: * * <ul> * <li>If ProtoTruth finds a descriptor, it unpacks the value and compares it against the * expected value, respecting any configuration methods used for the assertion. * <li>If ProtoTruth does not find a descriptor (or if the value can't be deserialized with the * descriptor), it compares the raw, serialized bytes of the expected and actual values. * </ul> * * <p>When ProtoTruth unpacks a value, it is parsing a serialized proto. That proto may contain * extensions. To look up those extensions, ProtoTruth uses the provided {@link * ExtensionRegistry}. 
* * @since 1.1 */ public MapWithProtoValuesFluentAssertion<M> unpackingAnyUsingForValues( TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) { return usingConfig(config.unpackingAnyUsing(typeRegistry, extensionRegistry)); } ////////////////////////////////////////////////////////////////////////////////////////////////// // UsingCorrespondence Methods ////////////////////////////////////////////////////////////////////////////////////////////////// private MapSubject.UsingCorrespondence<M, M> usingCorrespondence( Iterable<? extends M> expectedValues) { return comparingValuesUsing( config .withExpectedMessages(expectedValues) .<M>toCorrespondence(FieldScopeUtil.getSingleDescriptor(actual.values()))); } // The UsingCorrespondence methods have conflicting erasure with default MapSubject methods, // so we can't implement them both on the same class, but we want to define both so // MapWithProtoValuesSubjects are interchangeable with MapSubjects when no configuration is // specified. So, we implement a dumb, private delegator to return instead. private static final class MapWithProtoValuesFluentAssertionImpl<M extends Message> implements MapWithProtoValuesFluentAssertion<M> { private final MapWithProtoValuesSubject<M> subject; MapWithProtoValuesFluentAssertionImpl(MapWithProtoValuesSubject<M> subject) { this.subject = subject; } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceForValues() { return subject.ignoringFieldAbsenceForValues(); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues( int firstFieldNumber, int... 
rest) { return subject.ignoringFieldAbsenceOfFieldsForValues(firstFieldNumber, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldsForValues( Iterable<Integer> fieldNumbers) { return subject.ignoringFieldAbsenceOfFieldsForValues(fieldNumbers); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return subject.ignoringFieldAbsenceOfFieldDescriptorsForValues(firstFieldDescriptor, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldAbsenceOfFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return subject.ignoringFieldAbsenceOfFieldDescriptorsForValues(fieldDescriptors); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderForValues() { return subject.ignoringRepeatedFieldOrderForValues(); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues( int firstFieldNumber, int... rest) { return subject.ignoringRepeatedFieldOrderOfFieldsForValues(firstFieldNumber, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldsForValues( Iterable<Integer> fieldNumbers) { return subject.ignoringRepeatedFieldOrderOfFieldsForValues(fieldNumbers); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return subject.ignoringRepeatedFieldOrderOfFieldDescriptorsForValues( firstFieldDescriptor, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringRepeatedFieldOrderOfFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return subject.ignoringRepeatedFieldOrderOfFieldDescriptorsForValues(fieldDescriptors); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsForValues() { return subject.ignoringExtraRepeatedFieldElementsForValues(); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldsForValues( int firstFieldNumber, int... rest) { return subject.ignoringExtraRepeatedFieldElementsOfFieldsForValues(firstFieldNumber, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldsForValues( Iterable<Integer> fieldNumbers) { return subject.ignoringExtraRepeatedFieldElementsOfFieldsForValues(fieldNumbers); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return subject.ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues( firstFieldDescriptor, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return subject.ignoringExtraRepeatedFieldElementsOfFieldDescriptorsForValues( fieldDescriptors); } @Override public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForValues(double tolerance) { return subject.usingDoubleToleranceForValues(tolerance); } @Override public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues( double tolerance, int firstFieldNumber, int... 
rest) { return subject.usingDoubleToleranceForFieldsForValues(tolerance, firstFieldNumber, rest); } @Override public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldsForValues( double tolerance, Iterable<Integer> fieldNumbers) { return subject.usingDoubleToleranceForFieldsForValues(tolerance, fieldNumbers); } @Override public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues( double tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) { return subject.usingDoubleToleranceForFieldDescriptorsForValues( tolerance, firstFieldDescriptor, rest); } @Override public MapWithProtoValuesFluentAssertion<M> usingDoubleToleranceForFieldDescriptorsForValues( double tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return subject.usingDoubleToleranceForFieldDescriptorsForValues(tolerance, fieldDescriptors); } @Override public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForValues(float tolerance) { return subject.usingFloatToleranceForValues(tolerance); } @Override public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues( float tolerance, int firstFieldNumber, int... rest) { return subject.usingFloatToleranceForFieldsForValues(tolerance, firstFieldNumber, rest); } @Override public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldsForValues( float tolerance, Iterable<Integer> fieldNumbers) { return subject.usingFloatToleranceForFieldsForValues(tolerance, fieldNumbers); } @Override public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues( float tolerance, FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return subject.usingFloatToleranceForFieldDescriptorsForValues( tolerance, firstFieldDescriptor, rest); } @Override public MapWithProtoValuesFluentAssertion<M> usingFloatToleranceForFieldDescriptorsForValues( float tolerance, Iterable<FieldDescriptor> fieldDescriptors) { return subject.usingFloatToleranceForFieldDescriptorsForValues(tolerance, fieldDescriptors); } @Override public MapWithProtoValuesFluentAssertion<M> comparingExpectedFieldsOnlyForValues() { return subject.comparingExpectedFieldsOnlyForValues(); } @Override public MapWithProtoValuesFluentAssertion<M> withPartialScopeForValues(FieldScope fieldScope) { return subject.withPartialScopeForValues(fieldScope); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues( int firstFieldNumber, int... rest) { return subject.ignoringFieldsForValues(firstFieldNumber, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldsForValues( Iterable<Integer> fieldNumbers) { return subject.ignoringFieldsForValues(fieldNumbers); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues( FieldDescriptor firstFieldDescriptor, FieldDescriptor... 
rest) { return subject.ignoringFieldDescriptorsForValues(firstFieldDescriptor, rest); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldDescriptorsForValues( Iterable<FieldDescriptor> fieldDescriptors) { return subject.ignoringFieldDescriptorsForValues(fieldDescriptors); } @Override public MapWithProtoValuesFluentAssertion<M> ignoringFieldScopeForValues(FieldScope fieldScope) { return subject.ignoringFieldScopeForValues(fieldScope); } @Override public MapWithProtoValuesFluentAssertion<M> reportingMismatchesOnlyForValues() { return subject.reportingMismatchesOnlyForValues(); } @Override public MapWithProtoValuesFluentAssertion<M> unpackingAnyUsingForValues( TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) { return subject.unpackingAnyUsingForValues(typeRegistry, extensionRegistry); } @Override public void containsEntry(@Nullable Object expectedKey, @Nullable M expectedValue) { subject .usingCorrespondence(Arrays.asList(expectedValue)) .containsEntry(expectedKey, expectedValue); } @Override public void doesNotContainEntry(@Nullable Object excludedKey, @Nullable M excludedValue) { subject .usingCorrespondence(Arrays.asList(excludedValue)) .doesNotContainEntry(excludedKey, excludedValue); } @Override @CanIgnoreReturnValue @SuppressWarnings("unchecked") // ClassCastException is fine public Ordered containsExactly(@Nullable Object k0, @Nullable M v0, @Nullable Object... rest) { List<M> expectedValues = new ArrayList<>(); expectedValues.add(v0); for (int i = 1; i < rest.length; i += 2) { expectedValues.add((M) rest[i]); } return subject.usingCorrespondence(expectedValues).containsExactly(k0, v0, rest); } @Override @CanIgnoreReturnValue public Ordered containsExactlyEntriesIn(Map<?, ? 
extends M> expectedMap) { return subject .usingCorrespondence(expectedMap.values()) .containsExactlyEntriesIn(expectedMap); } @SuppressWarnings("DoNotCall") @Override @Deprecated public boolean equals(@Nullable Object o) { return subject.equals(o); } @SuppressWarnings("DoNotCall") @Override @Deprecated public int hashCode() { return subject.hashCode(); } } }
googleapis/google-cloud-java
37,732
java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/ListKmsConfigsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/netapp/v1/kms.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.netapp.v1; /** * * * <pre> * ListKmsConfigsRequest lists KMS Configs. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.ListKmsConfigsRequest} */ public final class ListKmsConfigsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.ListKmsConfigsRequest) ListKmsConfigsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListKmsConfigsRequest.newBuilder() to construct. 
private ListKmsConfigsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListKmsConfigsRequest() { parent_ = ""; pageToken_ = ""; orderBy_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListKmsConfigsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.KmsProto .internal_static_google_cloud_netapp_v1_ListKmsConfigsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.KmsProto .internal_static_google_cloud_netapp_v1_ListKmsConfigsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.ListKmsConfigsRequest.class, com.google.cloud.netapp.v1.ListKmsConfigsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * The maximum number of items to return. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ORDER_BY_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object orderBy_ = ""; /** * * * <pre> * Sort results. 
Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @return The orderBy. */ @java.lang.Override public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } } /** * * * <pre> * Sort results. Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @return The bytes for orderBy. */ @java.lang.Override public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, orderBy_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, orderBy_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.netapp.v1.ListKmsConfigsRequest)) { return super.equals(obj); } com.google.cloud.netapp.v1.ListKmsConfigsRequest other = (com.google.cloud.netapp.v1.ListKmsConfigsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getOrderBy().equals(other.getOrderBy())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + ORDER_BY_FIELD_NUMBER; hash = (53 * hash) + getOrderBy().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.netapp.v1.ListKmsConfigsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * ListKmsConfigsRequest lists KMS Configs. 
* </pre> * * Protobuf type {@code google.cloud.netapp.v1.ListKmsConfigsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.ListKmsConfigsRequest) com.google.cloud.netapp.v1.ListKmsConfigsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.KmsProto .internal_static_google_cloud_netapp_v1_ListKmsConfigsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.KmsProto .internal_static_google_cloud_netapp_v1_ListKmsConfigsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.ListKmsConfigsRequest.class, com.google.cloud.netapp.v1.ListKmsConfigsRequest.Builder.class); } // Construct using com.google.cloud.netapp.v1.ListKmsConfigsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; orderBy_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.netapp.v1.KmsProto .internal_static_google_cloud_netapp_v1_ListKmsConfigsRequest_descriptor; } @java.lang.Override public com.google.cloud.netapp.v1.ListKmsConfigsRequest getDefaultInstanceForType() { return com.google.cloud.netapp.v1.ListKmsConfigsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.netapp.v1.ListKmsConfigsRequest build() { com.google.cloud.netapp.v1.ListKmsConfigsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
com.google.cloud.netapp.v1.ListKmsConfigsRequest buildPartial() { com.google.cloud.netapp.v1.ListKmsConfigsRequest result = new com.google.cloud.netapp.v1.ListKmsConfigsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.netapp.v1.ListKmsConfigsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.orderBy_ = orderBy_; } if (((from_bitField0_ & 0x00000010) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.netapp.v1.ListKmsConfigsRequest) { return mergeFrom((com.google.cloud.netapp.v1.ListKmsConfigsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.cloud.netapp.v1.ListKmsConfigsRequest other) { if (other == com.google.cloud.netapp.v1.ListKmsConfigsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; bitField0_ |= 0x00000008; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000010; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { orderBy_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 case 42: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000010; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int 
bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
Parent value * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of items to return. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of items to return. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The maximum number of items to return. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. 
*/ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The next_page_token value to use if there are additional * results to retrieve for this list request. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object orderBy_ = ""; /** * * * <pre> * Sort results. Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @return The orderBy. 
*/ public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Sort results. Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @return The bytes for orderBy. */ public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Sort results. Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @param value The orderBy to set. * @return This builder for chaining. */ public Builder setOrderBy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } orderBy_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Sort results. Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @return This builder for chaining. */ public Builder clearOrderBy() { orderBy_ = getDefaultInstance().getOrderBy(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Sort results. Supported values are "name", "name desc" or "" (unsorted). * </pre> * * <code>string order_by = 4;</code> * * @param value The bytes for orderBy to set. * @return This builder for chaining. 
*/ public Builder setOrderByBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); orderBy_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** * * * <pre> * List filter. * </pre> * * <code>string filter = 5;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000010; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.ListKmsConfigsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.ListKmsConfigsRequest) private static final com.google.cloud.netapp.v1.ListKmsConfigsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.ListKmsConfigsRequest(); } public static com.google.cloud.netapp.v1.ListKmsConfigsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListKmsConfigsRequest> PARSER = new com.google.protobuf.AbstractParser<ListKmsConfigsRequest>() { @java.lang.Override public ListKmsConfigsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListKmsConfigsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListKmsConfigsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.netapp.v1.ListKmsConfigsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,833
java-meet/proto-google-cloud-meet-v2beta/src/main/java/com/google/apps/meet/v2beta/ParticipantSession.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/apps/meet/v2beta/resource.proto // Protobuf Java Version: 3.25.8 package com.google.apps.meet.v2beta; /** * * * <pre> * Refers to each unique join or leave session when a user joins a conference * from a device. Note that any time a user joins the conference a new unique ID * is assigned. That means if a user joins a space multiple times from the same * device, they're assigned different IDs, and are also be treated as different * participant sessions. * </pre> * * Protobuf type {@code google.apps.meet.v2beta.ParticipantSession} */ public final class ParticipantSession extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.apps.meet.v2beta.ParticipantSession) ParticipantSessionOrBuilder { private static final long serialVersionUID = 0L; // Use ParticipantSession.newBuilder() to construct. 
private ParticipantSession(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ParticipantSession() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ParticipantSession(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2beta.ResourceProto .internal_static_google_apps_meet_v2beta_ParticipantSession_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2beta.ResourceProto .internal_static_google_apps_meet_v2beta_ParticipantSession_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2beta.ParticipantSession.class, com.google.apps.meet.v2beta.ParticipantSession.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Identifier. Session id. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Identifier. Session id. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int START_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp startTime_; /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The startTime. */ @java.lang.Override public com.google.protobuf.Timestamp getStartTime() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } public static final int END_TIME_FIELD_NUMBER = 3; private com.google.protobuf.Timestamp endTime_; /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the endTime field is set. 
*/ @java.lang.Override public boolean hasEndTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The endTime. */ @java.lang.Override public com.google.protobuf.Timestamp getEndTime() { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getEndTime()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getEndTime()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.apps.meet.v2beta.ParticipantSession)) { return super.equals(obj); } com.google.apps.meet.v2beta.ParticipantSession other = (com.google.apps.meet.v2beta.ParticipantSession) obj; if (!getName().equals(other.getName())) return false; if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (!getStartTime().equals(other.getStartTime())) return false; } if (hasEndTime() != other.hasEndTime()) return false; if (hasEndTime()) { if (!getEndTime().equals(other.getEndTime())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStartTime().hashCode(); } if (hasEndTime()) { hash = (37 * hash) + END_TIME_FIELD_NUMBER; hash = (53 * hash) + getEndTime().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2beta.ParticipantSession parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.ParticipantSession parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.ParticipantSession parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.apps.meet.v2beta.ParticipantSession prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Refers to each unique join or leave session when a user joins a conference * from a device. Note that any time a user joins the conference a new unique ID * is assigned. That means if a user joins a space multiple times from the same * device, they're assigned different IDs, and are also be treated as different * participant sessions. 
* </pre> * * Protobuf type {@code google.apps.meet.v2beta.ParticipantSession} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.apps.meet.v2beta.ParticipantSession) com.google.apps.meet.v2beta.ParticipantSessionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2beta.ResourceProto .internal_static_google_apps_meet_v2beta_ParticipantSession_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2beta.ResourceProto .internal_static_google_apps_meet_v2beta_ParticipantSession_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2beta.ParticipantSession.class, com.google.apps.meet.v2beta.ParticipantSession.Builder.class); } // Construct using com.google.apps.meet.v2beta.ParticipantSession.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStartTimeFieldBuilder(); getEndTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } endTime_ = null; if (endTimeBuilder_ != null) { endTimeBuilder_.dispose(); endTimeBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.apps.meet.v2beta.ResourceProto .internal_static_google_apps_meet_v2beta_ParticipantSession_descriptor; } @java.lang.Override public com.google.apps.meet.v2beta.ParticipantSession 
getDefaultInstanceForType() { return com.google.apps.meet.v2beta.ParticipantSession.getDefaultInstance(); } @java.lang.Override public com.google.apps.meet.v2beta.ParticipantSession build() { com.google.apps.meet.v2beta.ParticipantSession result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.apps.meet.v2beta.ParticipantSession buildPartial() { com.google.apps.meet.v2beta.ParticipantSession result = new com.google.apps.meet.v2beta.ParticipantSession(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.apps.meet.v2beta.ParticipantSession result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.endTime_ = endTimeBuilder_ == null ? 
endTime_ : endTimeBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.apps.meet.v2beta.ParticipantSession) { return mergeFrom((com.google.apps.meet.v2beta.ParticipantSession) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.apps.meet.v2beta.ParticipantSession other) { if (other == com.google.apps.meet.v2beta.ParticipantSession.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasStartTime()) { mergeStartTime(other.getStartTime()); } if (other.hasEndTime()) { mergeEndTime(other.getEndTime()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getEndTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Identifier. Session id. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Identifier. Session id. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Identifier. Session id. 
* </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Identifier. Session id. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Identifier. Session id. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.Timestamp startTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the startTime field is set. */ public boolean hasStartTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The startTime. */ public com.google.protobuf.Timestamp getStartTime() { if (startTimeBuilder_ == null) { return startTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } else { return startTimeBuilder_.getMessage(); } } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } startTime_ = value; } else { startTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (startTimeBuilder_ == null) { startTime_ = builderForValue.build(); } else { startTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && startTime_ != null && startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getStartTimeBuilder().mergeFrom(value); } else { startTime_ = value; } } else { startTimeBuilder_.mergeFrom(value); } if (startTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Output only. Timestamp when the user session starts. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000002); startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Timestamp when the user session starts. * </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { if (startTimeBuilder_ != null) { return startTimeBuilder_.getMessageOrBuilder(); } else { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } } /** * * * <pre> * Output only. Timestamp when the user session starts. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getStartTimeFieldBuilder() { if (startTimeBuilder_ == null) { startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getStartTime(), getParentForChildren(), isClean()); startTime_ = null; } return startTimeBuilder_; } private com.google.protobuf.Timestamp endTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> endTimeBuilder_; /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the endTime field is set. */ public boolean hasEndTime() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The endTime. */ public com.google.protobuf.Timestamp getEndTime() { if (endTimeBuilder_ == null) { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } else { return endTimeBuilder_.getMessage(); } } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. 
* </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setEndTime(com.google.protobuf.Timestamp value) { if (endTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } endTime_ = value; } else { endTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (endTimeBuilder_ == null) { endTime_ = builderForValue.build(); } else { endTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeEndTime(com.google.protobuf.Timestamp value) { if (endTimeBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && endTime_ != null && endTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getEndTimeBuilder().mergeFrom(value); } else { endTime_ = value; } } else { endTimeBuilder_.mergeFrom(value); } if (endTime_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearEndTime() { bitField0_ = (bitField0_ & ~0x00000004); endTime_ = null; if (endTimeBuilder_ != null) { endTimeBuilder_.dispose(); endTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. 
Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { bitField0_ |= 0x00000004; onChanged(); return getEndTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { if (endTimeBuilder_ != null) { return endTimeBuilder_.getMessageOrBuilder(); } else { return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; } } /** * * * <pre> * Output only. Timestamp when the user session ends. Unset if the user * session hasn’t ended. * </pre> * * <code>.google.protobuf.Timestamp end_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getEndTimeFieldBuilder() { if (endTimeBuilder_ == null) { endTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getEndTime(), getParentForChildren(), isClean()); endTime_ = null; } return endTimeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.apps.meet.v2beta.ParticipantSession) } // 
@@protoc_insertion_point(class_scope:google.apps.meet.v2beta.ParticipantSession) private static final com.google.apps.meet.v2beta.ParticipantSession DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.apps.meet.v2beta.ParticipantSession(); } public static com.google.apps.meet.v2beta.ParticipantSession getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ParticipantSession> PARSER = new com.google.protobuf.AbstractParser<ParticipantSession>() { @java.lang.Override public ParticipantSession parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ParticipantSession> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ParticipantSession> getParserForType() { return PARSER; } @java.lang.Override public com.google.apps.meet.v2beta.ParticipantSession getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }